blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
5c49d4ef1e8ec65c666469c3d1f5055e5808aa1e | 63327e8feb2166ee49d92e874655fa8244cf4d4a | /astropy/wcs/wcsapi/conftest.py | bf89f8cb5ced72d84d11004ac7817ea5f11ffba3 | [
"BSD-3-Clause"
] | permissive | bmerry/astropy | 29c317f806138fd830f1b10fd5a33c98f09be6a0 | 437ef257f317c099613bbac78def59f7b011fa09 | refs/heads/master | 2023-02-14T15:35:38.246389 | 2020-06-24T12:09:21 | 2020-06-24T12:09:21 | 275,123,787 | 0 | 0 | BSD-3-Clause | 2020-06-26T09:53:11 | 2020-06-26T09:53:11 | null | UTF-8 | Python | false | false | 3,958 | py | import pytest
import numpy as np
from astropy.coordinates import SkyCoord
from astropy.units import Quantity
from astropy.wcs import WCS
from astropy.wcs.wcsapi import BaseLowLevelWCS
@pytest.fixture
def spectral_1d_fitswcs():
    """FITS-WCS fixture with a single spectral (frequency) axis.

    Reference pixel 11 maps to 4 GHz, stepping 3 GHz per pixel.
    """
    spectral_wcs = WCS(naxis=1)
    spectral_wcs.wcs.ctype = 'FREQ',
    spectral_wcs.wcs.cname = 'Frequency',
    spectral_wcs.wcs.cunit = 'Hz',
    spectral_wcs.wcs.crpix = 11.,
    spectral_wcs.wcs.crval = 4.e9,
    spectral_wcs.wcs.cdelt = 3.e9,
    return spectral_wcs
@pytest.fixture
def time_1d_fitswcs():
    """FITS-WCS fixture with a single time axis in seconds.

    MJD reference epoch (30042, 0); reference pixel 11 maps to t = 3 s.
    """
    time_wcs = WCS(naxis=1)
    time_wcs.wcs.ctype = 'TIME',
    time_wcs.wcs.cname = 'Time',
    time_wcs.wcs.cunit = 's'
    time_wcs.wcs.mjdref = (30042, 0)
    time_wcs.wcs.crpix = 11.,
    time_wcs.wcs.crval = 3.,
    return time_wcs
@pytest.fixture
def celestial_2d_fitswcs():
    """FITS-WCS fixture: RA/Dec plate-carree (CAR) image, 6x7 pixels."""
    sky_wcs = WCS(naxis=2)
    sky_wcs.wcs.ctype = 'RA---CAR', 'DEC--CAR'
    sky_wcs.wcs.cname = 'Right Ascension', 'Declination'
    sky_wcs.wcs.cunit = 'deg', 'deg'
    sky_wcs.wcs.crpix = 6., 7.
    sky_wcs.wcs.crval = 4., 0.
    sky_wcs.wcs.cdelt = -2., 2.
    # Image extent plus explicit pixel bounds for bounds-checking tests.
    sky_wcs.pixel_shape = (6, 7)
    sky_wcs.pixel_bounds = [(-1, 5), (1, 7)]
    return sky_wcs
@pytest.fixture
def spectral_cube_3d_fitswcs():
    """FITS-WCS fixture: RA/Dec/frequency spectral cube, 6x7x3 pixels."""
    cube_wcs = WCS(naxis=3)
    cube_wcs.wcs.ctype = 'RA---CAR', 'DEC--CAR', 'FREQ'
    cube_wcs.wcs.cname = 'Right Ascension', 'Declination', 'Frequency'
    cube_wcs.wcs.cunit = 'deg', 'deg', 'Hz'
    cube_wcs.wcs.crpix = 6., 7., 11.
    cube_wcs.wcs.crval = 4., 0., 4.e9
    cube_wcs.wcs.cdelt = -2., 2., 3.e9
    # Cube extent plus explicit pixel bounds for bounds-checking tests.
    cube_wcs.pixel_shape = (6, 7, 3)
    cube_wcs.pixel_bounds = [(-1, 5), (1, 7), (1, 2.5)]
    return cube_wcs
class Spectral1DLowLevelWCS(BaseLowLevelWCS):
    """Hand-written APE 14 low-level WCS with one spectral (frequency) axis.

    Pixel 10 (0-based) maps to 4 GHz with a step of 3 GHz per pixel,
    matching the ``spectral_1d_fitswcs`` fixture (its CRPIX of 11 is
    1-based, hence the offset of 10 here).
    """
    @property
    def pixel_n_dim(self):
        return 1
    @property
    def world_n_dim(self):
        return 1
    @property
    def world_axis_physical_types(self):
        return 'em.freq',
    @property
    def world_axis_units(self):
        return 'Hz',
    @property
    def world_axis_names(self):
        return 'Frequency',
    # Shape/bounds are mutable so individual tests can override them.
    _pixel_shape = None
    @property
    def pixel_shape(self):
        return self._pixel_shape
    @pixel_shape.setter
    def pixel_shape(self, value):
        self._pixel_shape = value
    _pixel_bounds = None
    @property
    def pixel_bounds(self):
        return self._pixel_bounds
    @pixel_bounds.setter
    def pixel_bounds(self, value):
        self._pixel_bounds = value
    def pixel_to_world_values(self, pixel_array):
        # Convert to ndarray *before* the arithmetic so plain Python lists
        # are accepted too (``list - 10`` raises TypeError); this mirrors
        # Celestial2DLowLevelWCS below. Results are unchanged for scalars
        # and ndarrays.
        return (np.asarray(pixel_array) - 10) * 3e9 + 4e9
    def world_to_pixel_values(self, world_array):
        # Exact inverse of pixel_to_world_values.
        return (np.asarray(world_array) - 4e9) / 3e9 + 10
    @property
    def world_axis_object_components(self):
        return ('test', 0, 'value'),
    @property
    def world_axis_object_classes(self):
        return {'test': (Quantity, (), {'unit': 'Hz'})}
@pytest.fixture
def spectral_1d_ape14_wcs():
    """Fresh Spectral1DLowLevelWCS instance for APE 14 interface tests."""
    ape14_wcs = Spectral1DLowLevelWCS()
    return ape14_wcs
class Celestial2DLowLevelWCS(BaseLowLevelWCS):
    """Hand-written APE 14 low-level WCS for a 2-d celestial (RA/Dec) image.

    Forward transform: RA = (5 - px) * 2 + 4 and Dec = (py - 6) * 2, over a
    fixed 6x7 pixel grid with explicit pixel bounds.
    """
    @property
    def pixel_n_dim(self):
        """Two pixel axes."""
        return 2
    @property
    def world_n_dim(self):
        """Two world axes (RA and Dec)."""
        return 2
    @property
    def world_axis_physical_types(self):
        return 'pos.eq.ra', 'pos.eq.dec'
    @property
    def world_axis_units(self):
        return 'deg', 'deg'
    @property
    def world_axis_names(self):
        return 'Right Ascension', 'Declination'
    @property
    def pixel_shape(self):
        """Fixed image extent, unlike the mutable spectral counterpart."""
        return (6, 7)
    @property
    def pixel_bounds(self):
        return (-1, 5), (1, 7)
    def pixel_to_world_values(self, px, py):
        """Map pixel coordinates to (RA, Dec) in degrees."""
        ra = (5. - np.asarray(px)) * 2 + 4.
        dec = (np.asarray(py) - 6.) * 2
        return ra, dec
    def world_to_pixel_values(self, wx, wy):
        """Exact inverse of pixel_to_world_values."""
        x_pix = (4. - np.asarray(wx)) / 2 + 5.
        y_pix = np.asarray(wy) / 2 + 6.
        return x_pix, y_pix
    @property
    def world_axis_object_components(self):
        return [('test', 0, 'spherical.lon.degree'),
                ('test', 1, 'spherical.lat.degree')]
    @property
    def world_axis_object_classes(self):
        return {'test': (SkyCoord, (), {'unit': 'deg'})}
@pytest.fixture
def celestial_2d_ape14_wcs():
    """Fresh Celestial2DLowLevelWCS instance for APE 14 interface tests."""
    ape14_wcs = Celestial2DLowLevelWCS()
    return ape14_wcs
| [
"stuart@cadair.com"
] | stuart@cadair.com |
5288343d98f93fa840d34d6794760c05753643ff | 6a3992a6150e134e542407f0593a2b29a6135828 | /recursion/recursion.py | d013796cad75bc54a5eb6357bc05943a5b2b65d6 | [] | no_license | ravinaNG/python | a44ffa4bb6cf43d3b9da5e43476e52dc5e0ca764 | 0f12f22bebf9ebe1a6bfb4f12da9e10faa1abc36 | refs/heads/master | 2020-06-17T13:47:01.264850 | 2019-08-17T07:25:00 | 2019-08-17T07:25:00 | 195,941,952 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 84 | py | def sum1(n):
if (n==0):
return 0
return sum1(n-1) + n
print sum1(6) | [
"ravina18@navgurukul.org"
] | ravina18@navgurukul.org |
1b0c462d03f9dd859f73c4ff724cf7035687f484 | 50bec959bbddcb255efefeb7477c5c0193e74301 | /workflowengine/workflowserializers/UserFlowSerializer.py | eea7302dd2c39ad02ee11d66e1ded5efc4d1e84d | [] | no_license | rupin/WorkflowEngine | bd7805f04c688967a92dfc5b5341b275ad798338 | bf1c250ba69ac3b1cef4a5f2c4fe0ff46660db68 | refs/heads/master | 2022-04-26T10:43:09.868851 | 2020-05-24T10:10:42 | 2020-05-24T10:10:42 | 202,849,528 | 6 | 2 | null | 2022-04-22T23:16:38 | 2019-08-17T07:22:28 | Python | UTF-8 | Python | false | false | 492 | py | from workflowengine.models.UserFlowModel import UserFlow
from rest_framework import serializers
from workflowengine.workflowserializers.FlowSerializer import FlowSerializer
from workflowengine.workflowserializers.CustomUserSerializer import CustomUserSerializer
from django.conf import settings
class UserFlowSerializer(serializers.ModelSerializer):
created_at = serializers.DateTimeField(format=settings.SITE_DATE_FORMAT, read_only=True)
class Meta:
model = UserFlow
fields = "__all__" | [
"rupin.chheda@gmail.com"
] | rupin.chheda@gmail.com |
f73af99cb6cc9e31bde95659d290c53144101545 | aadad415f425b9f45fed14290235488a46687a4f | /2011/avanzato/trunk/relazione-labo-mmm/calibrazione/potenziometro_temperatura.py | 89845915aeb14f920354c218967b218515af5abe | [] | no_license | enucatl-university/lab-unipd | c1fdae198ccc3af3f75ad07554e148427a9cc096 | c197bb92f479913c1183375fa22fd1619e6bbad4 | refs/heads/master | 2023-08-15T01:59:55.502505 | 2016-11-11T19:20:13 | 2016-11-11T19:20:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 871 | py | #!/usr/bin/env python
#coding=utf-8
from __future__ import division, print_function
import sys
from math import sqrt
from potenziometro_resistenza import PotenziometroResistenza
from resistenza_temperatura import ResistenzaTemperatura
class PotenziometroTemperatura:
    """Chain a potentiometer reading through resistance to temperature.

    Converts the potentiometer reading ``p`` (using the calibration fit in
    ``fit_file``) into a resistance, then looks that resistance up in the
    platinum-thermometer table ``tabella_pt`` to obtain the temperature
    ``T`` and its uncertainty ``sigma_T``.
    """
    def __init__(self, fit_file, tabella_pt, p):
        self.c = PotenziometroResistenza(fit_file, p)
        self.t = ResistenzaTemperatura(self.c.R, self.c.sigma_R, tabella_pt)
        self.T = self.t.T
        self.sigma_T = self.t.sigma_T
    def __str__(self):
        # Raw string: "\p" is not a recognised escape sequence, so the old
        # plain literal triggered a DeprecationWarning (SyntaxWarning on
        # newer Pythons). r"..." yields the exact same text, a LaTeX-style
        # "\pm", without the warning.
        return r"{0:.2f} \pm {1:.2f}".format(self.T, self.sigma_T)
if __name__ == "__main__":
    # Usage: potenziometro_temperatura.py <fit_file> <p> [tabella_pt]
    fit_file = sys.argv[1]
    p = float(sys.argv[2])
    # Optional third argument: platinum-thermometer lookup table file,
    # defaulting to "tabellaPT" when omitted.
    try:
        file_tabella = sys.argv[3]
    except IndexError:
        file_tabella = "tabellaPT"
    # Print the calibrated temperature as "T \pm sigma_T".
    PT = PotenziometroTemperatura(fit_file, file_tabella, p)
    print(PT)
| [
"gmatteo..abis@gmail.com"
] | gmatteo..abis@gmail.com |
25879cfcf48bc23fda52ebd265166f73220dd9d7 | 3432efd194137e1d0cb05656eb547c9992229f02 | /笔记/201711/星爷/day06/01test.py | 7be1476cd30d75ce3fded67c775a8ec18be7a855 | [] | no_license | zhanganxia/other_code | 31747d7689ae1e91fcf3f9f758df130246e7d495 | 8d09d9d0b6d6a1a9b8755487f926ac6fafd761fa | refs/heads/master | 2021-09-04T02:22:38.632685 | 2018-01-14T15:37:14 | 2018-01-14T15:37:14 | 107,007,482 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 157 | py | def line_conf(a,b):
def line(x):
return a*x + b
return line
# line_conf returns a closure computing a*x + b:
# line1 is y = x + 1, line2 is y = 4x + 5.
line1 = line_conf(1,1)
line2 = line_conf(4,5)
# Evaluate both at x = 5: prints 6, then 25.
print(line1(5))
print(line2(5))
| [
"kk@kk.rhel.cc"
] | kk@kk.rhel.cc |
561b2d15b9611936240e066b5a95c425078ce872 | 577b7d942174a1b799e9020bf8cf8c1087ec150b | /flask_app/match_score.py | 3dfcd4e50e6e9ebac1ca02559693042eef569a42 | [] | no_license | Crazy-Ideas/ipl2021 | 35b92569ed41e58d98ad3f19bad42c1d92efde2b | 6481126572437d00f804211f7982bde67af1f505 | refs/heads/master | 2023-07-17T10:50:25.448965 | 2021-09-04T04:23:31 | 2021-09-04T04:23:31 | 352,253,039 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,661 | py | from typing import Dict, List, Tuple
from firestore_ci import FirestoreDocument
from config import Config
class ScoreData:
    """Read-only accessor over one match's raw score dictionary.

    Expected layout (inferred from the lookups below -- confirm against the
    score feed): "batting", "bowling" and "fielding" are lists of team
    dicts whose "scores" list holds one record per player keyed by "pid";
    "team" lists the playing XIs under "players"; "man-of-the-match" is an
    optional record with a "pid" key. Accessors raise KeyError if a whole
    section is absent, mirroring the original behaviour.
    """
    def __init__(self, score_data: Dict):
        self.score_data = score_data
    def _find_player_record(self, pid: str, score_type: str) -> Dict:
        """Return the first record for `pid` in the `score_type` section, or {}.

        Shared by the float/string getters below (previously duplicated).
        """
        return next((player for team in self.score_data[score_type] for player in team["scores"]
                     if player["pid"] == pid), dict())
    def get_float_value(self, pid: str, key: str, score_type: str) -> float:
        """`key` for `pid` as a float; 0.0 when the record, the key, or a
        parsable number is missing."""
        player_record = self._find_player_record(pid, score_type)
        if not player_record or key not in player_record:
            return 0.0
        try:
            value = float(player_record[key])
        except ValueError:
            value = 0.0
        return value
    def get_int_value(self, pid: str, key: str, score_type: str) -> int:
        """`key` for `pid` truncated to an int (0 on any lookup failure)."""
        return int(self.get_float_value(pid, key, score_type))
    def get_string_value(self, pid: str, key: str, score_type: str) -> str:
        """`key` for `pid` as a string; "" when the record or key is missing."""
        player_record = self._find_player_record(pid, score_type)
        if not player_record or key not in player_record:
            return str()
        return str(player_record[key])
    # -- Batting figures --
    def get_runs(self, pid: str) -> int:
        return self.get_int_value(pid, "R", "batting")
    def get_fours(self, pid: str) -> int:
        return self.get_int_value(pid, "4s", "batting")
    def get_sixes(self, pid: str) -> int:
        return self.get_int_value(pid, "6s", "batting")
    def get_balls(self, pid: str) -> int:
        return self.get_int_value(pid, "B", "batting")
    def get_strike_rate(self, pid: str) -> float:
        return self.get_float_value(pid, "SR", "batting")
    def get_dismissal(self, pid: str) -> str:
        """Dismissal description, normalised to "" when the batsman is not out."""
        dismissal = self.get_string_value(pid, "dismissal", "batting")
        return str() if dismissal == "not out" else dismissal
    # -- Bowling figures --
    def get_overs(self, pid: str) -> float:
        return self.get_float_value(pid, "O", "bowling")
    def get_wickets(self, pid: str) -> int:
        return self.get_int_value(pid, "W", "bowling")
    def get_economy_rate(self, pid: str) -> float:
        return self.get_float_value(pid, "Econ", "bowling")
    def get_maidens(self, pid: str) -> int:
        return self.get_int_value(pid, "M", "bowling")
    # -- Fielding figures --
    def get_catches(self, pid: str) -> int:
        return self.get_int_value(pid, "catch", "fielding")
    def get_stumpings(self, pid: str) -> int:
        return self.get_int_value(pid, "stumped", "fielding")
    def get_runouts(self, pid: str) -> int:
        return self.get_int_value(pid, "runout", "fielding")
    def get_lbw(self, pid: str) -> int:
        return self.get_int_value(pid, "lbw", "fielding")
    def get_bowled(self, pid: str) -> int:
        return self.get_int_value(pid, "bowledSS", "fielding")
    # -- General --
    def get_man_of_the_match(self, pid: str) -> bool:
        """True iff the data names `pid` as man of the match."""
        if "man-of-the-match" not in self.score_data:
            return False
        if "pid" not in self.score_data["man-of-the-match"]:
            return False
        if pid != self.score_data["man-of-the-match"]["pid"]:
            return False
        return True
    def get_playing_xi(self) -> List[Tuple[str, str]]:
        """(pid, name) pairs for every player listed under "team"."""
        return [(player["pid"], player["name"]) for team in self.score_data["team"] for player in team["players"]]
class MatchPlayer(FirestoreDocument):
    """Per-match fantasy record for a single player.

    Raw batting/bowling/fielding figures are copied in from a ScoreData
    snapshot by update_scores(); the *_points properties convert them into
    fantasy points using the scoring rules encoded in each property.
    """
    def __init__(self):
        super().__init__()
        # Identity / ownership.
        self.player_id: str = str()
        self.player_name: str = str()
        self.player_type: str = str()  # role bucket; only tested against Config.BOWLERS (duck penalty) -- confirm values in config
        self.team: str = str()
        self.match_id: str = str()
        self.owner: str = str()  # fantasy-team owner key; mapped to a display name via Config.USER_LIST
        self.gameweek: int = int()
        self.type: str = Config.NORMAL  # NORMAL, CAPTAIN or SUB -- drives adjusted_points / display_class
        # Batting attributes
        self.runs: int = int()
        self.fours: int = int()
        self.sixes: int = int()
        self.balls: int = int()
        self.strike_rate: float = float()
        self.dismissal: str = str() # Empty string if player is not out
        # Bowling attributes
        self.overs: float = float()
        self.wickets: int = int()
        self.economy_rate: float = float()
        self.maiden: int = int()
        self.lbw: int = int()
        self.bowled: int = int()
        # Fielding attributes
        self.catch: int = int()
        self.stumping: int = int()
        self.runout: int = int()
        # General attributes
        self.man_of_the_match: bool = bool()
        self.in_playing_xi: bool = bool()
    @property
    def batting_points(self) -> int:
        """Batting score: 1 per run, +1 per four, +2 per six; cumulative
        milestone bonuses at 30/50/100 runs; -2 duck penalty for a dismissed
        non-bowler; strike-rate bonus/penalty once 10+ balls were faced."""
        score = self.runs + self.fours * 1 + self.sixes * 2
        if self.runs >= 30:
            score += 4
        if self.runs >= 50:
            score += 8
        if self.runs >= 100:
            score += 16
        # Duck: out for 0, with a known player_type not listed as a bowler.
        if self.runs == 0 and self.dismissal != str() and self.player_type and self.player_type not in Config.BOWLERS:
            score -= 2
        if self.balls >= 10:
            # Bonus tiers above SR 130, penalty tiers at SR 70 and below.
            if self.strike_rate > 170.0:
                score += 6
            elif self.strike_rate > 150.0:
                score += 4
            elif self.strike_rate >= 130.0:
                score += 2
            elif self.strike_rate < 50.0:
                score -= 6
            elif self.strike_rate < 60.0:
                score -= 4
            elif self.strike_rate <= 70.0:
                score -= 2
        return score
    @property
    def bowling_points(self) -> int:
        """Bowling score: 25 per wicket, 12 per maiden, 8 per lbw/bowled
        dismissal; cumulative haul bonuses at 3/4/5 wickets; economy-rate
        bonus/penalty once at least 2 overs were bowled."""
        score = self.wickets * 25 + self.maiden * 12 + self.lbw * 8 + self.bowled * 8
        if self.wickets >= 3:
            score += 4
        if self.wickets >= 4:
            score += 8
        if self.wickets >= 5:
            score += 16
        if self.overs >= 2.0:
            # Bonus tiers for economy <= 7, penalty tiers for economy >= 10.
            if self.economy_rate < 5.0:
                score += 6
            elif self.economy_rate < 6.0:
                score += 4
            elif self.economy_rate <= 7.0:
                score += 2
            elif self.economy_rate > 12.0:
                score -= 6
            elif self.economy_rate > 11.0:
                score -= 4
            elif self.economy_rate >= 10.0:
                score -= 2
        return score
    @property
    def fielding_points(self) -> int:
        """Fielding score: 10 per catch/stumping/run-out, +4 for 3+ in total."""
        score = self.catch * 10 + self.stumping * 10 + self.runout * 10
        if self.catch + self.stumping + self.runout >= 3:
            score += 4
        return score
    @property
    def man_of_the_match_points(self) -> int:
        """10 bonus points for winning the man-of-the-match award."""
        return 10 if self.man_of_the_match else 0
    @property
    def playing_xi_points(self) -> int:
        """4 points just for being named in the playing XI."""
        return 4 if self.in_playing_xi else 0
    @property
    def general_points(self) -> int:
        """Non-skill points: man-of-the-match plus playing-XI bonuses."""
        return self.man_of_the_match_points + self.playing_xi_points
    @property
    def total_points(self) -> int:
        """Sum of batting, bowling, fielding and general points."""
        return self.batting_points + self.bowling_points + self.general_points + self.fielding_points
    @property
    def adjusted_points(self) -> float:
        """total_points weighted by team role: doubled for CAPTAIN, halved
        for SUB, unchanged for a normal pick."""
        if self.type == Config.CAPTAIN:
            return float(self.total_points * 2)
        elif self.type == Config.SUB:
            return self.total_points / 2
        return float(self.total_points)
    @property
    def display_class(self) -> str:
        """Row-highlight CSS class (presumably Bootstrap table contextual
        classes -- green for captain, red for sub; verify in the templates)."""
        if self.type == Config.CAPTAIN:
            return "table-success"
        elif self.type == Config.SUB:
            return "table-danger"
        return str()
    @property
    def owner_full_name(self) -> str:
        """Owner's display name looked up in Config.USER_LIST ("" if unknown)."""
        return Config.USER_LIST.get(self.owner.upper(), str())
    def update_scores(self, score_data: ScoreData) -> None:
        """Copy this player's raw figures (keyed by self.player_id) out of
        `score_data` and mark the player as part of the playing XI."""
        # Batting attributes
        self.runs = score_data.get_runs(self.player_id)
        self.fours = score_data.get_fours(self.player_id)
        self.sixes = score_data.get_sixes(self.player_id)
        self.balls: int = score_data.get_balls(self.player_id)
        self.strike_rate: float = score_data.get_strike_rate(self.player_id)
        self.dismissal: str = score_data.get_dismissal(self.player_id)
        # Bowling attributes
        self.overs = score_data.get_overs(self.player_id)
        self.wickets = score_data.get_wickets(self.player_id)
        self.economy_rate = score_data.get_economy_rate(self.player_id)
        self.maiden: int = score_data.get_maidens(self.player_id)
        self.lbw: int = score_data.get_lbw(self.player_id)
        self.bowled: int = score_data.get_bowled(self.player_id)
        # Fielding attributes
        self.catch: int = score_data.get_catches(self.player_id)
        self.stumping: int = score_data.get_stumpings(self.player_id)
        self.runout: int = score_data.get_runouts(self.player_id)
        # General attributes
        self.man_of_the_match = score_data.get_man_of_the_match(self.player_id)
        self.in_playing_xi = True
# Bind the MatchPlayer document class to the "match_players" collection
# (firestore_ci registration convention, presumably -- confirm against
# FirestoreDocument.init).
MatchPlayer.init("match_players")
| [
"nayan@crazyideas.co.in"
] | nayan@crazyideas.co.in |
f8af275a225c509612e09d977869c62d2deda5be | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/pa3/benchmarks/prime-86.py | 579635d38e5b51be6d926171616f072a4a327384 | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 579 | py | # Get the n-th prime starting from 2
def get_prime(n:int) -> int:
    """Return the n-th prime (1-based), counting upward from 2.

    NOTE(review): `$Exp` below is a benchmark-template placeholder, not
    valid Python/ChocoPy -- it is substituted with a concrete expression
    (presumably `1`) before this file is executed. Left untouched on
    purpose.
    """
    candidate:int = 2
    found:int = 0
    while True:
        if is_prime(candidate):
            found = found + 1
            if found == n:
                return candidate
        candidate = candidate + $Exp
    return 0 # Never happens
def is_prime(x:int) -> bool:
    """Return True iff x is a prime number.

    The original accepted x < 2 (it returned True for 0, 1 and negatives
    because the loop never ran); callers here only pass candidates >= 2,
    so rejecting them is a safe correctness fix.
    """
    div:int = 2
    if x < 2:
        return False
    # Trial division: a composite x has a divisor no larger than sqrt(x),
    # so stop once div * div exceeds x instead of scanning all of [2, x).
    while div * div <= x:
        if x % div == 0:
            return False
        div = div + 1
    return True
# Driver: print the 1st through n-th primes, one per line.
# Input parameter
n:int = 15
# Run [1, n]
i:int = 1
# Crunch
while i <= n:
    print(get_prime(i))
    i = i + 1
| [
"647530+Virtlink@users.noreply.github.com"
] | 647530+Virtlink@users.noreply.github.com |
458996ee7b14a8721fdcab21afb7075fdc55aa48 | 84c1e780a349c4bae2d6cf4c1da72889d5222797 | /Statistics/Day_6_The Central Limit Theorem II/the_central_limit_theorem_ii.py | dc0b8e850078bec0c972d4511669fc112d3c477a | [
"MIT"
] | permissive | brianchiang-tw/HackerRank | 18e31583b10cf2189adac97e7cb2997d46790bcd | 02a30a0033b881206fa15b8d6b4ef99b2dc420c8 | refs/heads/master | 2020-09-23T23:18:08.253868 | 2020-02-13T14:16:22 | 2020-02-13T14:16:22 | 225,612,833 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 635 | py | from math import erf
def normal_distribution_cdf( x, miu, sigma):
    """Cumulative distribution function of N(miu, sigma**2) evaluated at x.

    Standardises x to a z-score, then applies the error-function identity
    CDF(z) = (1 + erf(z / sqrt(2))) / 2.
    """
    standardized = (x - miu) / sigma
    return (1 + erf(standardized / 2 ** 0.5)) / 2
def to_percentage( ratio ):
    """Convert a fraction (e.g. 0.25) to its percentage value (25.0)."""
    percentage = ratio * 100.0
    return percentage
if __name__ == '__main__':
    # Ticket-office capacity and cohort size for the problem instance.
    max_ticket = 250
    num_of_student = 100
    # Per-student ticket demand: mean 2.4, standard deviation 2.
    avg_tck_per_student = 2.4
    std_tck_per_student = 2
    # Central Limit Theorem: the sum of 100 i.i.d. demands is approximately
    # normal with mean 100 * 2.4 = 240 and std sqrt(100) * 2 = 20.
    avg_of_sample_sum = num_of_student * avg_tck_per_student
    std_of_sample_sum = (num_of_student ** 0.5) * std_tck_per_student
    # Probability that total demand stays within the 250-ticket limit.
    under_limit = normal_distribution_cdf( max_ticket, avg_of_sample_sum, std_of_sample_sum)
print( round(under_limit, 4) ) | [
"brianchiang1988@icloud.com"
] | brianchiang1988@icloud.com |
f0b57eae419c94d4ae3469be8f30b508875e23b3 | 06a7dc7cc93d019e4a9cbcf672b23a0bbacf8e8b | /2016_schizConnect/2018_analysis_2ndpart_clinic/clustering_based_on_PCs/corrected/correction_age_sex_site/3_clusters_solution/01_controls_clusters_correlations.py | 323e2bf01fa45a50b33ae435e1229d711b1ef8d5 | [] | no_license | neurospin/scripts | 6c06cd218a5f32de9c3c2b7d1d8bda3f3d107458 | f14a2c9cf2cd7f5fbea767b017c3faf36d170bdb | refs/heads/master | 2021-07-11T22:55:46.567791 | 2021-07-02T13:08:02 | 2021-07-02T13:08:02 | 10,549,286 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 7,802 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 11 17:02:41 2018
@author: ad247405
"""
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import nibabel as nib
import pandas as pd
import nibabel as nib
import json
from nilearn import plotting
from nilearn import image
from scipy.stats.stats import pearsonr
import shutil
import scipy.stats
import matplotlib.pyplot as plt
import seaborn as sns
import parsimony.utils.check_arrays as check_arrays
from sklearn import preprocessing
import statsmodels.api as sm
from statsmodels.formula.api import ols
import seaborn as sns
DATA_PATH = "/neurospin/brainomics/2016_schizConnect/2018_analysis_2ndpart_clinic/data"
INPUT_CLINIC_FILENAME = "/neurospin/abide/schizConnect/data/december_2017_clinical_score/schizconnect_NUSDAST_assessmentData_4495.csv"
site = np.load("/neurospin/brainomics/2016_schizConnect/analysis/all_studies+VIP/VBM/all_subjects/data/site.npy")
pop = pd.read_csv("/neurospin/brainomics/2016_schizConnect/analysis/all_studies+VIP/VBM/all_subjects/population.csv")
clinic = pd.read_csv(INPUT_CLINIC_FILENAME)
pop= pop[pop["site_num"]==3]
age = pop["age"].values
sex = pop["sex_num"].values
y = np.load("/neurospin/brainomics/2016_schizConnect/analysis/all_studies+VIP/VBM/all_subjects/data/y.npy")
labels_cluster = np.load("/neurospin/brainomics/2016_schizConnect/\
2018_analysis_2ndpart_clinic/results/clustering/corrected_results/\
correction_age_sex_site/3_clusters_solution/with_controls/labels_all.npy")
labels_cluster = labels_cluster
df_scores = pd.DataFrame()
df_scores["subjectid"] = pop.subjectid
for score in clinic.question_id.unique():
df_scores[score] = np.nan
for s in pop.subjectid:
curr = clinic[clinic.subjectid ==s]
for key in clinic.question_id.unique():
if curr[curr.question_id == key].empty == False:
df_scores.loc[df_scores["subjectid"]== s,key] = curr[curr.question_id == key].question_value.values[0]
################################################################################
df_stats = pd.DataFrame(columns=["T","p"])
df_stats.insert(0,"clinical_scores",clinic.question_id.unique())
################################################################################
output = "/neurospin/brainomics/2016_schizConnect/2018_analysis_2ndpart_clinic/\
results/clustering/corrected_results/correction_age_sex_site/3_clusters_solution/\
with_controls/controls_clusters_clinics_p_values.csv"
for key in clinic.question_id.unique():
try:
neurospycho = df_scores[key].astype(np.float).values
df = pd.DataFrame()
df[key] = neurospycho[np.array(np.isnan(neurospycho)==False)]
df["labels"]=labels_cluster[np.array(np.isnan(neurospycho)==False)]
T,p = scipy.stats.f_oneway(df[df["labels"]=="controls"][key],\
df[df["labels"]==0][key],\
df[df["labels"]==1][key],\
df[df["labels"]==2][key])
print(p)
df_stats.loc[df_stats.clinical_scores==key,"T"] = T
df_stats.loc[df_stats.clinical_scores==key,"p"] = p
except:
print("issue")
df_stats.loc[df_stats.clinical_scores==key,"T"] = np.nan
df_stats.loc[df_stats.clinical_scores==key,"p"] = np.nan
df_stats.to_csv(output)
################################################################################
output = "/neurospin/brainomics/2016_schizConnect/2018_analysis_2ndpart_clinic/\
results/clustering/corrected_results/correction_age_sex_site/3_clusters_solution/with_controls/nudast"
df = pd.DataFrame()
score = df_scores["vocabsca"].astype(np.float).values
df["labels"]=labels_cluster[np.array(np.isnan(score)==False)]
LABELS_DICT = {"controls":"controls",0: "cluster 1", 1: "cluster 2", 2: "cluster 3"}
df["labels_name"] = df["labels"].map(LABELS_DICT)
df["vocabsca"] = score[np.array(np.isnan(score)==False)]
T,p = scipy.stats.f_oneway(df[df["labels"]=="controls"]["vocabsca"],\
df[df["labels"]==0]["vocabsca"],\
df[df["labels"]==1]["vocabsca"],\
df[df["labels"]==2]["vocabsca"])
ax = sns.violinplot(x="labels_name", y="vocabsca", data=df,order=["controls","cluster 1","cluster 2","cluster 3"])
plt.title("ANOVA: t = %s, and p= %s"%(T,p))
plt.savefig(os.path.join(output,"vocabsca.png"))
df = pd.DataFrame()
score = df_scores["cvlfps"].astype(np.float).values
df["labels"]=labels_cluster[np.array(np.isnan(score)==False)]
df["labels"]=labels_cluster[np.array(np.isnan(score)==False)]
LABELS_DICT = {"controls":"controls",0: "cluster 1", 1: "cluster 2", 2: "cluster 3"}
df["labels_name"] = df["labels"].map(LABELS_DICT)
df["cvlfps"] = score[np.array(np.isnan(score)==False)]
T, p = scipy.stats.f_oneway(df[df["labels"]=="controls"]["cvlfps"],\
df[df["labels"]==0]["cvlfps"],\
df[df["labels"]==1]["cvlfps"],\
df[df["labels"]==2]["cvlfps"])
ax = sns.violinplot(x="labels_name", y="cvlfps", data=df,order=["controls","cluster 1","cluster 2","cluster 3"])
plt.title("ANOVA: t = %s, and p= %s"%(T,p))
plt.savefig(os.path.join(output,"cvlfps.png"))
df = pd.DataFrame()
score = df_scores["matrxraw"].astype(np.float).values
df["labels"]=labels_cluster[np.array(np.isnan(score)==False)]
df["labels"]=labels_cluster[np.array(np.isnan(score)==False)]
LABELS_DICT = {"controls":"controls",0: "cluster 1", 1: "cluster 2", 2: "cluster 3"}
df["labels_name"] = df["labels"].map(LABELS_DICT)
df["matrxraw"] = score[np.array(np.isnan(score)==False)]
T, p = scipy.stats.f_oneway(df[df["labels"]=="controls"]["matrxraw"],\
df[df["labels"]==0]["matrxraw"],\
df[df["labels"]==1]["matrxraw"],\
df[df["labels"]==2]["matrxraw"])
ax = sns.violinplot(x="labels_name", y="matrxraw", data=df,order=["controls","cluster 1","cluster 2","cluster 3"])
plt.title("ANOVA: t = %s, and p= %s"%(T,p))
plt.savefig(os.path.join(output,"matrxraw.png"))
df = pd.DataFrame()
score = df_scores["dstscalc"].astype(np.float).values
df["labels"]=labels_cluster[np.array(np.isnan(score)==False)]
df["labels"]=labels_cluster[np.array(np.isnan(score)==False)]
LABELS_DICT = {"controls":"controls",0: "cluster 1", 1: "cluster 2", 2: "cluster 3"}
df["labels_name"] = df["labels"].map(LABELS_DICT)
df["dstscalc"] = score[np.array(np.isnan(score)==False)]
T, p = scipy.stats.f_oneway(df[df["labels"]=="controls"]["dstscalc"],\
df[df["labels"]==0]["dstscalc"],\
df[df["labels"]==1]["dstscalc"],\
df[df["labels"]==2]["dstscalc"])
ax = sns.violinplot(x="labels_name", y="dstscalc", data=df,order=["controls","cluster 1","cluster 2","cluster 3"])
plt.title("ANOVA: t = %s, and p= %s"%(T,p))
plt.savefig(os.path.join(output,"dstscalc.png"))
df = pd.DataFrame()
score = df_scores["sstscalc"].astype(np.float).values
df["labels"]=labels_cluster[np.array(np.isnan(score)==False)]
df["labels"]=labels_cluster[np.array(np.isnan(score)==False)]
LABELS_DICT = {"controls":"controls",0: "cluster 1", 1: "cluster 2", 2: "cluster 3"}
df["labels_name"] = df["labels"].map(LABELS_DICT)
df["sstscalc"] = score[np.array(np.isnan(score)==False)]
T, p = scipy.stats.f_oneway(df[df["labels"]=="controls"]["sstscalc"],\
df[df["labels"]==0]["sstscalc"],\
df[df["labels"]==1]["sstscalc"],\
df[df["labels"]==2]["sstscalc"])
ax = sns.violinplot(x="labels_name", y="sstscalc", data=df,order=["controls","cluster 1","cluster 2","cluster 3"])
plt.title("ANOVA: t = %s, and p= %s"%(T,p))
plt.savefig(os.path.join(output,"sstscalc.png"))
################################################################################
| [
"ad247405@is222241.intra.cea.fr"
] | ad247405@is222241.intra.cea.fr |
01834489d2b6648a02c08315475f21f26b0cbe27 | 4013cf1b3b65130f36fac82f23b7e6e373ca3ac3 | /appliers/models.py | f63808e62e84556a454b24d8bd42e0e58404e2cf | [] | no_license | malkoG/HI-ARC-Portal | b89fa3571a155b3a23927a96a9c16d9bcc2a3974 | a3bd3cca8c1dd9c30c840f29cb8ad10227b17a36 | refs/heads/master | 2021-11-27T00:38:51.080620 | 2018-07-29T03:30:44 | 2018-07-29T03:30:44 | 140,441,235 | 1 | 0 | null | 2018-07-10T14:05:05 | 2018-07-10T14:05:05 | null | UTF-8 | Python | false | false | 904 | py | from django.db import models
class Applier(models.Model):
    """Membership application submitted through the society portal."""
    created_at = models.DateTimeField(auto_now_add=True)
    # Credentials the applicant will use on the Portal once membership
    # is approved.
    # NOTE(review): the password appears to be stored as plain text --
    # consider hashing it before persisting.
    username = models.CharField(max_length=30, blank=False)
    password = models.CharField(max_length=100, blank=False)
    # Fields the executives review when deciding whether to approve
    # the application.
    realname = models.CharField(max_length=30, blank=False)
    student_id = models.CharField(max_length=10, blank=False)
    major = models.CharField(max_length=20, blank=False)
    grade = models.CharField(max_length=10, blank=False)
    phone_number = models.CharField(max_length=15, blank=False)
    email = models.CharField(max_length=50, blank=False)
    motivation = models.TextField(blank=False)
    portfolio = models.TextField()
class Meta:
ordering = ('created_at', ) | [
"rijgndqw012@gmail.com"
] | rijgndqw012@gmail.com |
85741891712bca7d83bab0d31be89df2c4157aa8 | 50d41a527633bcc853e4c4b6a6b749a97112c5c4 | /build-files/setup.py | f35039fb17879188a55535f1001b6e047350f360 | [
"MIT"
] | permissive | deone/ussd-session | 1f5e906e955e37f329946b8b19b3e78ed1cda81d | 1d1d7adb4abc5df2672f7c13f04ad5cad332b0cc | refs/heads/master | 2021-07-05T03:15:29.372744 | 2019-06-20T12:56:50 | 2019-06-20T12:56:50 | 102,523,642 | 1 | 0 | null | 2021-06-10T21:36:37 | 2017-09-05T19:51:53 | Python | UTF-8 | Python | false | false | 1,133 | py | # Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
# Directory containing this setup.py (root of the distribution).
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
# (codecs.open is used, per the import above, so the encoding argument
# also works on Python 2 -- the classifiers below target 2.6/2.7).
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()
setup(
    name='ussd-session',
    version='0.1.6',
    description='USSD session app for Django.',
    long_description=long_description,
    url='https://github.com/deone/ussd-session',
    author='Dayo Osikoya',
    author_email='alwaysdeone@gmail.com',
    license='MIT',
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
    ],
    keywords='ussd http session json response django',
    packages=find_packages(),
    # NOTE(review): pinned hard to a single Django release; consider a
    # version range if consumers need newer Django.
    install_requires=['Django==1.11.10'],
)
| [
"alwaysdeone@gmail.com"
] | alwaysdeone@gmail.com |
125dccc70afbb6f7e7dc28dbb95cbe4a87f0b0a4 | 71a7d88907c9fc02c989db84fb9b3d452e613278 | /python/maintcal/maintcal/tests/unit/date/test_timezone.py | 21a9ef80b749611d3619dec63b8de4b287765f18 | [] | no_license | raychorn/svn_rackspace | 5ca38426775d5475ad62c06dbfa7ec2ed05f3ca9 | c87323446f2820d165edab7f8c7a7436b7bee19e | refs/heads/main | 2023-01-01T19:16:56.682787 | 2020-10-15T22:20:58 | 2020-10-15T22:20:58 | 304,458,167 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,493 | py | from maintcal.tests.lib import MaintcalUnitTest
from maintcal.lib.date.maintcal_datetime import MaintcalDatetime
from maintcal.lib.date.timezone import TimezoneType, get_timezone_type
from datetime import datetime, timedelta
class TestTzFile(MaintcalUnitTest):
    """Tests for get_timezone_type() lookups around US DST transitions.

    Every MaintcalDatetime passed to find_timezone_info_utc() is a UTC
    instant; the expected TimezoneType captures the local offset and
    abbreviation in effect at that instant.

    Fixes applied: leading-zero integer literals (``01``, ``02``, ``00``)
    are octal syntax in Python 2 and a SyntaxError in Python 3 -- replaced
    with the equivalent plain decimals; ``== None`` replaced with the
    idiomatic identity check ``is None``.
    """
    def test_bogus_timezone_name(self):
        """An unknown Olson timezone name must raise ValueError."""
        self.assertRaises( ValueError, get_timezone_type, timezone_name = 'blah')
    def test_get_timezone_type(self):
        """A valid Olson name yields a non-None timezone object."""
        tzfile = get_timezone_type( 'America/Chicago' )
        if tzfile is None:
            self.fail()
    def testfind_timezone_info_utc(self):
        # Just as a reminder, all these START date times are UTC
        tzfile = get_timezone_type( 'America/Chicago' )
        # Jan 2nd: plain winter time, CST (UTC-6).
        maintcal_datetime = MaintcalDatetime(2009, 1, 2, 9, 4, 5)
        self.assertEqual(tzfile.find_timezone_info_utc( maintcal_datetime ),
            TimezoneType(offset=-21600, delta=timedelta(-1, 64800), isdst=0, abbr='CST', isstd=False, isgmt=False) )
        # This is in UTC so this is actually Oct 31, 20:59:59 CDT -5
        maintcal_datetime = MaintcalDatetime(2009, 11, 1, 1, 59, 59)
        self.assertEqual(tzfile.find_timezone_info_utc( maintcal_datetime ),
            TimezoneType(offset=-18000, delta=timedelta(-1, 68400), isdst=1, abbr='CDT', isstd=False, isgmt=False) )
    def testfind_timezone_info_utc_nov_case(self):
        # Just as a reminder, all these START date times are UTC
        tzfile = get_timezone_type( 'America/Chicago' )
        # Last second of CDT: 06:59:59 UTC == 01:59:59 local daylight time.
        maintcal_datetime = MaintcalDatetime(2009, 11, 1, 6, 59, 59)
        self.assertEqual(tzfile.find_timezone_info_utc( maintcal_datetime ),
            TimezoneType(offset=-18000, delta=timedelta(-1, 68400), isdst=1, abbr='CDT', isstd=False, isgmt=False) )
        # One second later the clocks fall back to CST.
        maintcal_datetime = MaintcalDatetime(2009, 11, 1, 7, 0, 0)
        self.assertEqual(tzfile.find_timezone_info_utc( maintcal_datetime ),
            TimezoneType(offset=-21600, delta=timedelta(-1, 64800), isdst=0, abbr='CST', isstd=False, isgmt=False) )
    def test_find_timezone_info_utc_mar_case(self):
        # Just as a reminder, all these START date times are UTC
        tzfile = get_timezone_type( 'America/Chicago' )
        # Last second of CST: 07:59:59 UTC == 01:59:59 local standard time.
        maintcal_datetime = MaintcalDatetime(2009, 3, 8, 7, 59, 59)
        self.assertEqual(tzfile.find_timezone_info_utc( maintcal_datetime ),
            TimezoneType(offset=-21600, delta=timedelta(-1, 64800), isdst=0, abbr='CST', isstd=False, isgmt=False) )
        # One second later the clocks spring forward to CDT.
        maintcal_datetime = MaintcalDatetime(2009, 3, 8, 8, 0, 0)
        self.assertEqual(tzfile.find_timezone_info_utc( maintcal_datetime ),
            TimezoneType(offset=-18000, delta=timedelta(-1, 68400), isdst=1, abbr='CDT', isstd=False, isgmt=False) )
    def test_find_timezone_info_utc_nov_case_st_john(self):
        # Just as a reminder, all these START date times are UTC
        # Newfoundland uses a half-hour offset (NDT = UTC-2:30).
        tzfile = get_timezone_type( 'America/St_Johns' )
        maintcal_datetime = MaintcalDatetime(2009, 11, 1, 2, 30, 59)
        self.assertEqual(tzfile.find_timezone_info_utc( maintcal_datetime ),
            TimezoneType(offset=-9000, delta=timedelta(-1, 77400), isdst=1, abbr='NDT', isstd=False, isgmt=False) )
        #maintcal_datetime = MaintcalDatetime(2009, 3, 8, 8, 00, 00)
        #self.assertEqual(tzfile.find_timezone_info_utc( maintcal_datetime ),
        #TimezoneType(offset=-18000, delta=timedelta(-1, 68400), isdst=1, abbr='CDT', isstd=False, isgmt=False) )
| [
"raychorn@gmail.com"
] | raychorn@gmail.com |
6af79bfad1d2865737951a424c27b6b701a8e616 | 44b3d66dce1b8b87ed7a20b9f2a57d5c40a6c010 | /enso/platform/win32/selection/TextSelection.py | bc29ba7cc343f46d70186dc56b964f2efb3ebfee | [
"BSD-2-Clause"
] | permissive | blackdaemon/enso-launcher-continued | 0b203567c9670d5a6fa95b546d7edf64953ee94c | 346f82811e77caf73560619cdeb16afabfbf1fce | refs/heads/master | 2020-06-03T16:29:31.579370 | 2019-05-22T22:39:32 | 2019-05-22T22:39:32 | 30,513,152 | 7 | 4 | null | null | null | null | UTF-8 | Python | false | false | 23,278 | py | # Copyright (c) 2008, Humanized, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Enso nor the names of its contributors may
# be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY Humanized, Inc. ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Humanized, Inc. BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Contains all the methods for dealing with text selections.
Usage: Contexts.TextSelection.get() returns a TextSelection object
( one of AbstractTextSelection's subclasses as appropriate to the
front application ). The client code can then use the various
methods of the object to get and set text.
If nothing is selected, this module is supposed to return a TextSelection
object containing no text. It should NOT return None.
"""
# ----------------------------------------------------------------------------
# Imports
# ----------------------------------------------------------------------------
import os
import win32gui
import win32clipboard
import win32con
import logging
from abc import ABCMeta, abstractmethod
from enso.commands.decorators import warn_overriding
import ClipboardBackend
import ClipboardArchive
import _ContextUtils as ContextUtils
from HtmlClipboardFormat import HtmlClipboardFormat
# Internal aliases for external names
clipboardDependent = ContextUtils.clipboardDependent
clipboardPreserving = ClipboardArchive.clipboardPreserving

# ----------------------------------------------------------------------------
# Constants
# ----------------------------------------------------------------------------

# Time to wait after issuing a shortcut-key command to an application,
# passed to the ClipboardBackend wait helpers (presumably milliseconds --
# confirm against ClipboardBackend).
# LONGTERM TODO: Figure out the best time; the standard time was not
# long enough for long HTML documents.
STANDARD_WAIT_TIME = 600

# Import all of the clipboard format constants for ease of use
CF_RTF = ContextUtils.CF_RTF
CF_HTML = ContextUtils.CF_HTML
CF_CLIPBOARD_VIEWER_IGNORE = ContextUtils.CF_CLIPBOARD_VIEWER_IGNORE
CF_UNICODETEXT = win32con.CF_UNICODETEXT
CF_TEXT = win32con.CF_TEXT

# List of the clipboard formats in which we are able to output text
# (consumed by AbstractTextSelection._pasteText / _renderClipboardFormat).
SUPPORTED_FORMATS = [ CF_HTML,
                      # CF_RTF, # We don't support this yet.
                      CF_TEXT,
                      CF_UNICODETEXT,
                      CF_CLIPBOARD_VIEWER_IGNORE,
                      ]
# ----------------------------------------------------------------------------
# Private Utility Functions
# ----------------------------------------------------------------------------
def _concatenate( currentTextDict, additionalTextDict ):
"""
Returns a new textDict containing the text
from currentTextDict appended with the text from additionalTextDict.
"""
# The plaintext contents can be combined simply:
newPlainText = currentTextDict.get( "text", u"" ) + \
additionalTextDict.get( "text", u"" )
newHtml = currentTextDict.get( "html", u"" ) + \
currentTextDict.get( "html", u"" )
newDict = {}
if len( newPlainText ) > 0:
newDict[ "text" ] = newPlainText
if len( newHtml ) > 0:
newDict[ "html" ] = newHtml
return newDict
def _textDictToAscii( textDict ):
text = textDict.get( "text", u"" )
return text.encode( "ascii", "replace" )
def _textDictToUtf16( textDict ):
text = textDict.get( "text", u"" )
return text.encode( "utf-16-le" )
def _textDictToClipboardHtml( textDict ):
    """Render the textDict's html into the Windows CF_HTML clipboard format."""
    fragment = textDict.get( "html", u"" )
    return HtmlClipboardFormat.fromHtml( fragment ).toClipboardHtml()
# ----------------------------------------------------------------------------
# Classes
# ----------------------------------------------------------------------------
class AbstractTextSelection( object ):
    """
    As an abstract class, this should never be instantiated; it
    defines the interface that all TextSelection objects should
    follow.

    This class can also contain protected methods which are utility
    functions shared between different subclasses.
    """

    __metaclass__ = ABCMeta

    # Method names subclasses are expected to override; presumably
    # consumed by the warn_overriding decorator imported above -- confirm.
    override = ("replaceSelection", "insertAtCursor", "simulateCopyKeystroke",
                "simulateCutKeystroke", "simulatePasteKeystroke")

    @clipboardPreserving
    def getSelection( self ):
        """
        Returns a textDict ( with, potentially, "text" and "html" keys )
        containing the text that is selected in the application.
        Attempts to do so without clobbering the clipboard (the
        @clipboardPreserving decorator restores it afterwards).
        """
        # Clear first so a failed copy (e.g. no selection) is not
        # confused with stale clipboard contents.
        ContextUtils.clearClipboard()
        ClipboardBackend.prepareForClipboardToChange()
        self.simulateCopyKeystroke()
        ClipboardBackend.waitForClipboardToChange( STANDARD_WAIT_TIME )
        result = self.getClipboardText()
        return result

    @abstractmethod
    def replaceSelection( self, textDict ):
        """
        Abstract method that must be overridden by subclasses.

        Replaces current selection with given textDict. (textDict is
        expected to contain "text" and/or "html" keys.) If there is
        no selection, inserts textObject at the insertion point.

        Returns True if this operation succeeded, False if it did not.
        """
        return True

    @abstractmethod
    def insertAtCursor( self, textDict ):
        """
        Abstract method that must be overridden by subclasses.

        Appends textDict to the end of the selection. If there is
        no selection, inserts textDict at the insertion point.

        Returns True if this operation succeeded, False if it did not.
        """
        return True

    @clipboardDependent
    def getClipboardText( self ):
        """
        Attempts to get text from clipboard, create a textDict
        wrapping it, and return the dictionary; takes care of opening and
        closing the clipboard (via the @clipboardDependent decorator).

        If nothing is available, returns an empty dict.
        """
        # We attempt to get two pieces of information from the clipboard:
        # the formatted text and the plain text.

        # Try to get plaintext from unicode text in clipboard; this
        # is likely to be a better version of the unformatted text than
        # what we could produce by stripping out format tags, and it's
        # also easier to use.
        if win32clipboard.IsClipboardFormatAvailable( CF_UNICODETEXT ):
            try:
                plainText = win32clipboard.GetClipboardData( CF_UNICODETEXT )
            except win32clipboard.error, e:
                # This is a fix for ticket #415: a zero error code is
                # treated as "no data" rather than a real failure.
                if e.args[0] == 0:
                    logging.info( "GetClipboardData() error suppressed." )
                    return {}
                else:
                    raise
            assert isinstance( plainText, unicode ), \
                   "GetClipboardData returned not-a-unicode object!!"
        else:
            # If UNICODETEXT is not available, then all other
            # plain-text formats are unavailable; however,
            # we can fall back on getting the plaintext by stripping
            # formatting info out of the formatted text.
            plainText = None

        # Try to get HTML from clipboard:
        if win32clipboard.IsClipboardFormatAvailable( CF_HTML ):
            logging.debug( "HTML is available, getting it." )
            formatText = win32clipboard.GetClipboardData( CF_HTML )
        else:
            formatText = None

        # TODO if plainText is none and formatText is not none, then
        # try to create plainText from formatText by stripping the HTML --
        # see how this is done in EnsoTextObject.

        newTextDict = {}
        if plainText != None:
            newTextDict[ "text" ] = plainText
        if formatText != None:
            newTextDict[ "html" ] = formatText
        return newTextDict

    @abstractmethod
    def simulateCopyKeystroke( self ):
        """
        Abstract method that must be overridden by subclasses.

        Simulate Ctrl-C or whatever keystroke causes a copy
        action in the current context.
        """
        return

    @abstractmethod
    def simulateCutKeystroke( self ):
        """
        Abstract method that must be overridden by subclasses.

        Simulate Ctrl-X or whatever keystroke causes a cut
        action in the current context.
        """
        return

    @abstractmethod
    def simulatePasteKeystroke( self ):
        """
        Abstract method that must be overridden by subclasses.

        Simulate Ctrl-V or whatever keystroke causes a paste
        action in the current context.
        """
        return

    def _renderClipboardFormat( self, textDict, format ):
        """
        Interprets a given clipboard format code and returns contents
        rendered into the corresponding format, and explicitly
        terminated with null bytes: in other words, the text returned
        from this function is ready to be put into the Windows
        Clipboard.
        """
        # Precondition:
        assert( format in [ CF_TEXT, CF_UNICODETEXT, CF_HTML ] )

        if format == CF_TEXT:
            text = _textDictToAscii( textDict )
            terminator = "\0"
        elif format == CF_UNICODETEXT:
            # For CF_UNICODETEXT we must provide double-null-terminated
            # Utf-16:
            text = _textDictToUtf16( textDict )
            terminator = "\0\0"
        elif format == CF_HTML:
            text = _textDictToClipboardHtml( textDict )
            # No terminator should be used on clipboard html format.
            terminator = ""
        result = text + terminator

        # Postcondition: byte string ready for the Win32 clipboard.
        assert( type( result ) == str )
        return result

    def _pasteText( self, textDict ):
        """
        Puts the given textObject into the clipboard, then simulates
        a paste keystroke in the application. If all goes well, the
        text will be inserted into the application, and this function
        returns True. If the attempt fails, returns False.

        Uses the Win32 delayed-rendering mechanism: the data is only
        rendered (via getPendingData) for the formats the target
        application actually requests.
        """
        # Create the function which will be used for callback
        def getPendingData( formatCode ):
            try:
                if formatCode == ContextUtils.CF_CLIPBOARD_VIEWER_IGNORE:
                    # Marker telling clipboard viewers to ignore this change.
                    return "HumanizedEnsoTextSelectionContext\0"
                else:
                    return self._renderClipboardFormat( textDict, formatCode )
            except Exception:
                # Log and re-raise: exceptions escaping a clipboard render
                # callback would otherwise vanish silently.
                import traceback
                logging.error( "Traceback in getPendingData():\n%s" %
                               traceback.format_exc() )
                raise

        # Give the above function to clipboard backend, along with the
        # list of formats in which we can support pasting
        ClipboardBackend.prepareForPasting( getPendingData,
                                            SUPPORTED_FORMATS )
        # then type the paste command key, which will cause the app to
        # draw the data out of getPendingData.
        self.simulatePasteKeystroke()
        ClipboardBackend.waitForPaste( STANDARD_WAIT_TIME )
        success = ClipboardBackend.finalizePasting()
        return success
class DefaultTextSelection( AbstractTextSelection ):
    """
    This subclass encapsulates the default methods for getting text
    from and putting text into an application, based on Ctrl-C for
    copy and Ctrl-V for paste. It is returned by TextSelection.get()
    if the currently active application is not one that we have a
    special subclass for.
    """

    def simulateCopyKeystroke( self ):
        """
        Simulate Ctrl-C, which is the copy command in most
        applications.
        """
        ContextUtils.typeCommandKey( "c" )

    def simulateCutKeystroke( self ):
        """
        Simulate Ctrl-X, which is the cut command in most
        applications.
        """
        ContextUtils.typeCommandKey( "x" )

    def simulatePasteKeystroke( self ):
        """
        Simulate Ctrl-V, which is the paste command in most
        applications.
        """
        ContextUtils.typeCommandKey( "v" )

    @clipboardPreserving
    def replaceSelection( self, textDict ):
        """
        Replace the selected text with the given textObject by doing a
        paste operation. In most applications, a paste operation
        replaces any selected text, so this is the behavior we want.
        For applications where this is not the case, see
        NonReplacingTextSelection.

        Returns a boolean telling whether replacement succeeded.
        """
        return self._pasteText( textDict )

    @clipboardPreserving
    def insertAtCursor( self, textDict ):
        """
        Inserts the given text at the cursor position without
        replacing anything.

        Returns a boolean telling whether insertion succeeded.
        """
        # LONGTERM TODO:
        # find a better way to do this? It's currently not really inserting
        # at cursor, it's replacing text selection with text selection + new
        # text. This means that, for instance, if I do an insert when
        # the cursor is at the beginning of the selection, my new text
        # will actually appear at the end of the selection.
        #
        # If there were a programmatic way to deselect the selection without
        # changing the text or moving the cursor, we could do that, then
        # issue paste... but I don't know of a way to do this.

        # Try to do a copy; whether the clipboard changes tells us
        # whether anything was selected:
        ClipboardBackend.prepareForClipboardToChange()
        self.simulateCopyKeystroke()
        changed = ClipboardBackend.waitForClipboardToChange( STANDARD_WAIT_TIME )
        if not changed:
            # There was no selection to copy; just paste the text:
            return self._pasteText( textDict )
        else:
            # There was an existing selection: Concatenate it with
            # the new text:
            currentText = self.getClipboardText()
            newText = _concatenate( currentText, textDict )
            return self._pasteText( newText )
class NonReplacingTextSelection( DefaultTextSelection ):
    """
    In some applications, notably MoonEdit and Emacs, a paste
    does not replace selected text: it inserts
    at the cursor. This is the selection context to use for
    those applications.
    """

    @clipboardPreserving
    def replaceSelection( self, textDict ):
        """
        Replace the selected text with the given text by first
        cutting and discarding the selected text, then pasting in
        the new text.

        Returns a boolean telling whether replacement succeeded.
        """
        # Cut removes the selection; the cut content is discarded (the
        # @clipboardPreserving decorator restores the clipboard later).
        ClipboardBackend.prepareForClipboardToChange()
        self.simulateCutKeystroke()
        ClipboardBackend.waitForClipboardToChange( STANDARD_WAIT_TIME )
        return self._pasteText( textDict )

    @clipboardPreserving
    def insertAtCursor( self, textDict ):
        """
        Insert text at the cursor position by doing a paste operation
        (safe here because paste does not replace the selection in
        these applications).

        Returns a boolean telling whether insertion succeeded.
        """
        return self._pasteText( textDict )
class EmacsTextSelection( NonReplacingTextSelection ):
    """Text-selection context used when the active application is Emacs.

    Emacs yanks at point without replacing the region, so the
    non-replacing base class provides the right replace/insert logic;
    this class only maps copy/cut/paste onto Emacs keystrokes.
    """
    # LONGTERM TODO: figure out an appropriate behavior when
    # "transient-mark mode" is turned on in Emacs (and how we can
    # detect this).
    # Note on replaceSelection in this context: the Emacs "selection"
    # is everything between point and mark, so it is fairly easy to
    # have text selected without realizing it and then lose it here;
    # no good solution is known.

    def simulateCopyKeystroke( self ):
        # ESC w -- copy without killing.
        ContextUtils.typeSequence( "ESC W" )

    def simulateCutKeystroke( self ):
        # Ctrl-w -- kill the region.
        ContextUtils.typeCommandKey( "w" )

    def simulatePasteKeystroke( self ):
        # Ctrl-y -- yank.
        ContextUtils.typeCommandKey( "y" )
class CommandPromptTextSelection( DefaultTextSelection ):
    """Text-selection context for the Windows Command Prompt.

    The console offers very little programmatic access to its text, so
    copy and paste go through the console window's system menu and cut
    is simply unavailable.
    """

    def simulateCopyKeystroke( self ):
        # Alt-Space pops up the window menu (the thing you get by
        # clicking the upper-left corner); "e" then "y" selects
        # Edit -> Copy.
        ContextUtils.typeAltKey( " " )
        ContextUtils.typeSequence( "e y" )

    def simulateCutKeystroke( self ):
        # The console provides no cut operation.
        pass

    def simulatePasteKeystroke( self ):
        # Alt-Space pops up the window menu; "e" then "p" selects
        # Edit -> Paste.
        ContextUtils.typeAltKey( " " )
        ContextUtils.typeSequence( "e p" )

    @clipboardPreserving
    def insertAtCursor( self, textDict ):
        """
        Attempt to insert text into the command prompt window.

        Returns a boolean telling whether insertion succeeded.
        """
        return self._pasteText( textDict )
class VimTextSelection( DefaultTextSelection ):
    """Text-selection context for the Windows builds of Vim/gVim."""

    def simulateCopyKeystroke( self ):
        # Ctrl-Insert copies by default in Windows Vim/gVim, but yanking
        # also clears the visual selection, so the full sequence is:
        #   Ctrl-Insert   copy text to clipboard
        #   Ctrl-[ (x2)   ensure we are back in normal mode
        #   gv            re-select the last visual selection
        ContextUtils.typeSequence('CD INS [ [ CU g v')

    def simulateCutKeystroke( self ):
        # Ctrl-Del cuts by default in Windows Vim/gVim.
        ContextUtils.typeCommandKey(win32con.VK_DELETE)

    def simulatePasteKeystroke( self ):
        # Shift-Insert pastes by default in Windows Vim/gVim.
        ContextUtils.typeShiftKey(win32con.VK_INSERT)
class PuTTYTextSelection( NonReplacingTextSelection ):
    """Text-selection context for the PuTTY terminal client.

    PuTTY places selected text on the clipboard automatically as soon
    as a selection is made, so no explicit copy keystroke is needed and
    getSelection() can read the clipboard directly.
    """

    def simulateCopyKeystroke( self ):
        # Nothing to do: the selection is already on the clipboard.
        pass

    def simulateCutKeystroke( self ):
        # TODO: PuTTY appears to offer no cut operation.
        pass

    def simulatePasteKeystroke( self ):
        ContextUtils.typeShiftKey(win32con.VK_INSERT)

    def getSelection( self ):
        """
        Returns a textDict ( with, potentially, "text" and "html" keys )
        read straight from the clipboard, which PuTTY keeps in sync
        with the current selection.
        """
        return self.getClipboardText()
class LotusNotesTextSelection( DefaultTextSelection ):
    """
    Returned if the currently active application is Lotus Notes.
    """

    @clipboardPreserving
    def replaceSelection( self, textDict ):
        """
        Replace the selected text by pasting over it.

        When textDict carries plain text, it is placed on the clipboard
        eagerly (no delayed rendering) before the paste keystroke is
        sent; any "html" content is ignored in that case. Otherwise the
        default delayed-rendering paste is used.
        # NOTE(review): presumably Notes does not cope with the
        # delayed-rendering paste path used by DefaultTextSelection --
        # confirm.

        Returns a boolean telling whether replacement succeeded.
        """
        text = textDict.get('text')
        if text:
            # Put the plain text on the clipboard directly.
            win32clipboard.OpenClipboard()
            win32clipboard.EmptyClipboard()
            win32clipboard.SetClipboardText(text)
            win32clipboard.CloseClipboard()
            # Send the paste keystroke; the data is already on the
            # clipboard, so no render-on-demand callback is involved.
            self.simulatePasteKeystroke()
            ClipboardBackend.waitForPaste( STANDARD_WAIT_TIME )
            ClipboardBackend.finalizePasting()
            return True
        else:
            return DefaultTextSelection.replaceSelection(self, textDict)
# ----------------------------------------------------------------------------
# Public Function
# ----------------------------------------------------------------------------
def get():
    """
    Creates and returns a TextSelection object of a subclass
    appropriate to the currently active application.

    If no text is selected, this must return a TextSelection object with
    no text in it -- NOT a None.
    """
    hwnd = win32gui.GetForegroundWindow()
    className = ContextUtils.getWindowClassName(hwnd)

    def _processBaseName():
        # Lower-cased executable name of the foreground process, or ""
        # when it cannot be determined.
        processName = ContextUtils.getWindowProcessName(hwnd)
        if not processName:
            return ""
        return os.path.basename(processName).lower()

    if className == u"Emacs":
        return EmacsTextSelection()
    elif className == u"MoonEdit":
        return NonReplacingTextSelection()
    elif className == u"ConsoleWindowClass":
        # Vim running inside a console window needs the Vim keystrokes.
        # BUGFIX: guard against getWindowProcessName() returning None
        # (the Vim/NOTES branches already did; this one crashed in
        # os.path.basename(None)).
        if _processBaseName() == "vim.exe":
            return VimTextSelection()
        return CommandPromptTextSelection()
    elif className == u"Vim":
        if _processBaseName() == "gvim.exe":
            return VimTextSelection()
    elif className == u"PuTTY":
        return PuTTYTextSelection()
    elif className == u"NOTES":
        if _processBaseName() in ("notes.exe", "nlnotes.exe"):
            return LotusNotesTextSelection()
    # Anything unrecognized gets the Ctrl-C/Ctrl-V default behavior.
    return DefaultTextSelection()
| [
"pavelvitis@gmail.com"
] | pavelvitis@gmail.com |
20e46c354bebb09b49d5f643ec0624ccfb952aff | 1dacbf90eeb384455ab84a8cf63d16e2c9680a90 | /lib/python2.7/site-packages/traitlets/config/loader.py | 0519aba37c2db294a8cbf96f87f65fd4f96a2353 | [
"Python-2.0",
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-unknown"
] | permissive | wangyum/Anaconda | ac7229b21815dd92b0bd1c8b7ec4e85c013b8994 | 2c9002f16bb5c265e0d14f4a2314c86eeaa35cb6 | refs/heads/master | 2022-10-21T15:14:23.464126 | 2022-10-05T12:10:31 | 2022-10-05T12:10:31 | 76,526,728 | 11 | 10 | Apache-2.0 | 2022-10-05T12:10:32 | 2016-12-15T05:26:12 | Python | UTF-8 | Python | false | false | 28,943 | py | # encoding: utf-8
"""A simple configuration system."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import argparse
import copy
import logging
import os
import re
import sys
import json
from ast import literal_eval
from ipython_genutils.path import filefind
from ipython_genutils import py3compat
from ipython_genutils.encoding import DEFAULT_ENCODING
from ipython_genutils.py3compat import unicode_type, iteritems
from traitlets.traitlets import HasTraits, List, Any
#-----------------------------------------------------------------------------
# Exceptions
#-----------------------------------------------------------------------------
class ConfigError(Exception):
    """Base class for all configuration-related errors."""
    pass
class ConfigLoaderError(ConfigError):
    """Raised when a ConfigLoader fails to load or parse a config source."""
    pass
class ConfigFileNotFound(ConfigError):
    """Raised when a requested config file cannot be located."""
    pass
class ArgumentError(ConfigLoaderError):
    """Raised for malformed or unparseable command-line arguments."""
    pass
#-----------------------------------------------------------------------------
# Argparse fix
#-----------------------------------------------------------------------------
# Unfortunately argparse by default prints help messages to stderr instead of
# stdout. This makes it annoying to capture long help screens at the command
# line, since one must know how to pipe stderr, which many users don't know how
# to do. So we override the print_help method with one that defaults to
# stdout and use our class instead.
class ArgumentParser(argparse.ArgumentParser):
    """Simple argparse subclass that prints help to stdout by default."""

    def print_help(self, file=None):
        # argparse writes help to stderr by default, which makes long help
        # screens awkward to capture; default to stdout instead.
        target = sys.stdout if file is None else file
        return super(ArgumentParser, self).print_help(target)

    print_help.__doc__ = argparse.ArgumentParser.print_help.__doc__
#-----------------------------------------------------------------------------
# Config class for holding config information
#-----------------------------------------------------------------------------
class LazyConfigValue(HasTraits):
    """Proxy object for exposing methods on configurable containers

    Records container mutations before the real value exists, then
    replays them onto the actual value in :meth:`get_value`.

    Exposes:

    - append, extend, insert on lists
    - update on dicts
    - update, add on sets
    """

    # Memoized result of get_value(); None until first computed.
    _value = None

    # list methods
    _extend = List()
    _prepend = List()

    def append(self, obj):
        # Deferred list.append.
        self._extend.append(obj)

    def extend(self, other):
        # Deferred list.extend.
        self._extend.extend(other)

    def prepend(self, other):
        """like list.extend, but for the front"""
        self._prepend[:0] = other

    # Deferred (index, object) pairs for list.insert.
    _inserts = List()

    def insert(self, index, other):
        if not isinstance(index, int):
            raise TypeError("An integer is required")
        self._inserts.append((index, other))

    # dict methods
    # update is used for both dict and set
    _update = Any()

    def update(self, other):
        # Lazily choose dict vs set semantics from the first operand seen.
        if self._update is None:
            if isinstance(other, dict):
                self._update = {}
            else:
                self._update = set()
        self._update.update(other)

    # set methods
    def add(self, obj):
        self.update({obj})

    def get_value(self, initial):
        """construct the value from the initial one

        after applying any insert / extend / update changes

        The result is memoized: later calls return the same object
        regardless of `initial`.
        """
        if self._value is not None:
            return self._value
        # Never mutate the caller's initial value.
        value = copy.deepcopy(initial)
        if isinstance(value, list):
            for idx, obj in self._inserts:
                value.insert(idx, obj)
            value[:0] = self._prepend
            value.extend(self._extend)

        elif isinstance(value, dict):
            if self._update:
                value.update(self._update)

        elif isinstance(value, set):
            if self._update:
                value.update(self._update)
        self._value = value
        return value

    def to_dict(self):
        """return JSONable dict form of my data

        Currently update as dict or set, extend, prepend as lists, and inserts as list of tuples.
        """
        d = {}
        if self._update:
            d['update'] = self._update
        if self._extend:
            d['extend'] = self._extend
        if self._prepend:
            d['prepend'] = self._prepend
        elif self._inserts:
            # NOTE(review): the `elif` drops recorded inserts whenever a
            # prepend is present -- confirm this is intentional.
            d['inserts'] = self._inserts
        return d
def _is_section_key(key):
"""Is a Config key a section name (does it start with a capital)?"""
if key and key[0].upper()==key[0] and not key.startswith('_'):
return True
else:
return False
class Config(dict):
    """An attribute based dict that can do smart merges.

    Section keys (capitalized, e.g. ``c.MyClass``) are auto-vivified as
    nested Config objects; other missing keys yield LazyConfigValue
    proxies so container methods can be recorded before a value exists.
    """

    def __init__(self, *args, **kwds):
        dict.__init__(self, *args, **kwds)
        self._ensure_subconfig()

    def _ensure_subconfig(self):
        """ensure that sub-dicts that should be Config objects are

        casts dicts that are under section keys to Config objects,
        which is necessary for constructing Config objects from dict literals.
        """
        for key in self:
            obj = self[key]
            if _is_section_key(key) \
                    and isinstance(obj, dict) \
                    and not isinstance(obj, Config):
                setattr(self, key, Config(obj))

    def _merge(self, other):
        """deprecated alias, use Config.merge()"""
        self.merge(other)

    def merge(self, other):
        """merge another config object into this one"""
        to_update = {}
        for k, v in iteritems(other):
            if k not in self:
                to_update[k] = v
            else:  # I have this key
                if isinstance(v, Config) and isinstance(self[k], Config):
                    # Recursively merge common sub Configs
                    self[k].merge(v)
                else:
                    # Plain updates for non-Configs
                    to_update[k] = v

        self.update(to_update)

    def collisions(self, other):
        """Check for collisions between two config objects.

        Returns a dict of the form {"Class": {"trait": "collision message"}}`,
        indicating which values have been ignored.

        An empty dict indicates no collisions.
        """
        collisions = {}
        for section in self:
            if section not in other:
                continue
            mine = self[section]
            theirs = other[section]
            for key in mine:
                if key in theirs and mine[key] != theirs[key]:
                    collisions.setdefault(section, {})
                    collisions[section][key] = "%r ignored, using %r" % (mine[key], theirs[key])
        return collisions

    def __contains__(self, key):
        # allow nested contains of the form `"Section.key" in config`
        if '.' in key:
            first, remainder = key.split('.', 1)
            if first not in self:
                return False
            return remainder in self[first]

        return super(Config, self).__contains__(key)

    # .has_key is deprecated for dictionaries.
    has_key = __contains__

    def _has_section(self, key):
        return _is_section_key(key) and key in self

    def copy(self):
        return type(self)(dict.copy(self))

    def __copy__(self):
        return self.copy()

    def __deepcopy__(self, memo):
        new_config = type(self)()
        for key, value in self.items():
            if isinstance(value, (Config, LazyConfigValue)):
                # deep copy config objects
                value = copy.deepcopy(value, memo)
            elif type(value) in {dict, list, set, tuple}:
                # shallow copy plain container traits
                value = copy.copy(value)
            new_config[key] = value
        return new_config

    def __getitem__(self, key):
        try:
            return dict.__getitem__(self, key)
        except KeyError:
            if _is_section_key(key):
                # Auto-vivify missing sections as empty sub-Configs.
                c = Config()
                dict.__setitem__(self, key, c)
                return c
            elif not key.startswith('_'):
                # undefined, create lazy value, used for container methods
                v = LazyConfigValue()
                dict.__setitem__(self, key, v)
                return v
            else:
                raise KeyError

    def __setitem__(self, key, value):
        if _is_section_key(key):
            if not isinstance(value, Config):
                raise ValueError('values whose keys begin with an uppercase '
                                 'char must be Config instances: %r, %r' % (key, value))
        dict.__setitem__(self, key, value)

    def __getattr__(self, key):
        if key.startswith('__'):
            # NOTE(review): dict defines no __getattr__, so this path
            # raises AttributeError for dunder names -- confirm intended.
            return dict.__getattr__(self, key)
        try:
            return self.__getitem__(key)
        except KeyError as e:
            raise AttributeError(e)

    def __setattr__(self, key, value):
        if key.startswith('__'):
            return dict.__setattr__(self, key, value)
        try:
            self.__setitem__(key, value)
        except KeyError as e:
            raise AttributeError(e)

    def __delattr__(self, key):
        if key.startswith('__'):
            return dict.__delattr__(self, key)
        try:
            dict.__delitem__(self, key)
        except KeyError as e:
            raise AttributeError(e)
#-----------------------------------------------------------------------------
# Config loading classes
#-----------------------------------------------------------------------------
class ConfigLoader(object):
    """A object for loading configurations from just about anywhere.

    The resulting configuration is packaged as a :class:`Config`.

    Notes
    -----
    A :class:`ConfigLoader` does one thing: load a config from a source
    (file, command line arguments) and returns the data as a :class:`Config` object.
    There are lots of things that :class:`ConfigLoader` does not do. It does
    not implement complex logic for finding config files. It does not handle
    default values or merge multiple configs. These things need to be
    handled elsewhere.
    """

    def _log_default(self):
        # Fall back to the application-wide traitlets logger when no
        # logger is supplied to __init__.
        from traitlets.log import get_logger
        return get_logger()

    def __init__(self, log=None):
        """A base class for config loaders.

        log : instance of :class:`logging.Logger` to use.
              By default logger of :meth:`traitlets.config.application.Application.instance()`
              will be used

        Examples
        --------
        >>> cl = ConfigLoader()
        >>> config = cl.load_config()
        >>> config
        {}
        """
        self.clear()
        if log is None:
            self.log = self._log_default()
            self.log.debug('Using default logger')
        else:
            self.log = log

    def clear(self):
        # Reset any previously loaded state to an empty Config.
        self.config = Config()

    def load_config(self):
        """Load a config from somewhere, return a :class:`Config` instance.

        Usually, this will cause self.config to be set and then returned.
        However, in most cases, :meth:`ConfigLoader.clear` should be called
        to erase any previous state.
        """
        self.clear()
        return self.config
class FileConfigLoader(ConfigLoader):
    """A base class for file based configurations.

    As we add more file based config loaders, the common logic should go
    here.
    """

    def __init__(self, filename, path=None, **kw):
        """Build a config loader for a filename and path.

        Parameters
        ----------
        filename : str
            The file name of the config file.
        path : str, list, tuple
            The path to search for the config file on, or a sequence of
            paths to try in order.
        """
        super(FileConfigLoader, self).__init__(**kw)
        self.filename = filename
        self.path = path
        # Absolute path of the located file; filled in by _find_file().
        self.full_filename = ''

    def _find_file(self):
        """Try to find the file by searching the paths."""
        # filefind raises IOError when the file cannot be located.
        self.full_filename = filefind(self.filename, self.path)
class JSONFileConfigLoader(FileConfigLoader):
    """A JSON file loader for config

    Can also act as a context manager that rewrite the configuration file to disk on exit.

    Example::

        with JSONFileConfigLoader('myapp.json','/home/jupyter/configurations/') as c:
            c.MyNewConfigurable.new_value = 'Updated'

    """

    def load_config(self):
        """Load the config from a file and return it as a Config object."""
        self.clear()
        try:
            self._find_file()
        except IOError as e:
            # Translate the lookup failure into the loader's own error type.
            raise ConfigFileNotFound(str(e))
        dct = self._read_file_as_dict()
        self.config = self._convert_to_config(dct)
        return self.config

    def _read_file_as_dict(self):
        # Parse the located file as JSON.
        with open(self.full_filename) as f:
            return json.load(f)

    def _convert_to_config(self, dictionary):
        if 'version' in dictionary:
            version = dictionary.pop('version')
        else:
            # No explicit version key: assume the version-1 layout and warn.
            version = 1
            self.log.warning("Unrecognized JSON config file version, assuming version {}".format(version))

        if version == 1:
            return Config(dictionary)
        else:
            raise ValueError('Unknown version of JSON config file: {version}'.format(version=version))

    def __enter__(self):
        self.load_config()
        return self.config

    def __exit__(self, exc_type, exc_value, traceback):
        """
        Exit the context manager but do not handle any errors.

        In case of any error, we do not want to write the potentially broken
        configuration to disk.
        """
        self.config.version = 1
        json_config = json.dumps(self.config, indent=2)
        with open(self.full_filename, 'w') as f:
            f.write(json_config)
class PyFileConfigLoader(FileConfigLoader):
    """A config loader for pure python files.
    This is responsible for locating a Python config file by filename and
    path, then executing it to construct a Config object.
    """
    def load_config(self):
        """Load the config from a file and return it as a Config object."""
        self.clear()
        try:
            self._find_file()
        except IOError as e:
            # normalize "file not found" into the loader-specific exception
            raise ConfigFileNotFound(str(e))
        # executing the file populates self.config in place
        self._read_file_as_dict()
        return self.config
    def load_subconfig(self, fname, path=None):
        """Injected into config file namespace as load_subconfig"""
        # default to searching the same path as the parent config file
        if path is None:
            path = self.path
        loader = self.__class__(fname, path)
        try:
            sub_config = loader.load_config()
        except ConfigFileNotFound:
            # Pass silently if the sub config is not there,
            # treat it as an empty config file.
            pass
        else:
            self.config.merge(sub_config)
    def _read_file_as_dict(self):
        """Load the config file into self.config, with recursive loading."""
        def get_config():
            """Unnecessary now, but a deprecation warning is more trouble than it's worth."""
            return self.config
        # names made available inside the executed config file
        namespace = dict(
            c=self.config,
            load_subconfig=self.load_subconfig,
            get_config=get_config,
            __file__=self.full_filename,
        )
        fs_encoding = sys.getfilesystemencoding() or 'ascii'
        # NOTE(review): the filename is encoded to bytes with the filesystem
        # encoding before being handed to py3compat.execfile -- confirm that
        # helper expects bytes on the targeted Python versions
        conf_filename = self.full_filename.encode(fs_encoding)
        py3compat.execfile(conf_filename, namespace)
class CommandLineConfigLoader(ConfigLoader):
    """A config loader for command line arguments.
    As we add more command line based loaders, the common logic should go
    here.
    """
    def _exec_config_str(self, lhs, rhs):
        """execute self.config.<lhs> = <rhs>
        * expands ~ with expanduser
        * tries to assign with literal_eval, otherwise assigns with just the string,
          allowing `--C.a=foobar` and `--C.a="foobar"` to be equivalent.  *Not*
          equivalent are `--C.a=4` and `--C.a='4'`.
        """
        rhs = os.path.expanduser(rhs)
        try:
            # Try to see if regular Python syntax will work. This
            # won't handle strings as the quote marks are removed
            # by the system shell.
            value = literal_eval(rhs)
        except (NameError, SyntaxError, ValueError):
            # This case happens if the rhs is a string.
            value = rhs
        # exec (rather than setattr) so dotted names like "Class.trait"
        # assign into nested config sections; lhs comes straight from argv,
        # so this implicitly trusts the command line
        exec(u'self.config.%s = value' % lhs)
    def _load_flag(self, cfg):
        """update self.config from a flag, which can be a dict or Config"""
        if isinstance(cfg, (dict, Config)):
            # don't clobber whole config sections, update
            # each section from config:
            for sec,c in iteritems(cfg):
                self.config[sec].update(c)
        else:
            raise TypeError("Invalid flag: %r" % cfg)
# raw --identifier=value pattern
# but *also* accept '-' as wordsep, for aliases
# accepts: --foo=a
#          --Class.trait=value
#          --alias-name=value
# rejects: -foo=value
#          --foo
#          --Class.trait
# (matched against the raw argv token, before leading dashes are stripped)
kv_pattern = re.compile(r'\-\-[A-Za-z][\w\-]*(\.[\w\-]+)*\=.*')
# just flags, no assignments, with two *or one* leading '-'
# accepts: --foo
#          -foo-bar-again
# rejects: --anything=anything
#          --two.word
flag_pattern = re.compile(r'\-\-?\w+[\-\w]*$')
class KeyValueConfigLoader(CommandLineConfigLoader):
    """A config loader that loads key value pairs from the command line.
    This allows command line options to be given in the following form::
        ipython --profile="foo" --InteractiveShell.autocall=False
    """
    def __init__(self, argv=None, aliases=None, flags=None, **kw):
        """Create a key value pair config loader.
        Parameters
        ----------
        argv : list
            A list that has the form of sys.argv[1:] which has unicode
            elements of the form u"key=value". If this is None (default),
            then sys.argv[1:] will be used.
        aliases : dict
            A dict of aliases for configurable traits.
            Keys are the short aliases, Values are the resolved trait.
            Of the form: `{'alias' : 'Configurable.trait'}`
        flags : dict
            A dict of flags, keyed by str name. Values can be Config objects,
            dicts, or "key=value" strings. If Config or dict, when the flag
            is triggered, The flag is loaded as `self.config.update(m)`.
        Returns
        -------
        config : Config
            The resulting Config object.
        Examples
        --------
        >>> from traitlets.config.loader import KeyValueConfigLoader
        >>> cl = KeyValueConfigLoader()
        >>> d = cl.load_config(["--A.name='brian'","--B.number=0"])
        >>> sorted(d.items())
        [('A', {'name': 'brian'}), ('B', {'number': 0})]
        """
        super(KeyValueConfigLoader, self).__init__(**kw)
        if argv is None:
            argv = sys.argv[1:]
        self.argv = argv
        self.aliases = aliases or {}
        self.flags = flags or {}
    def clear(self):
        super(KeyValueConfigLoader, self).clear()
        # arguments that were neither key=value pairs nor flags
        self.extra_args = []
    def _decode_argv(self, argv, enc=None):
        """decode argv if bytes, using stdin.encoding, falling back on default enc"""
        uargv = []
        if enc is None:
            enc = DEFAULT_ENCODING
        for arg in argv:
            if not isinstance(arg, unicode_type):
                # only decode if not already decoded
                arg = arg.decode(enc)
            uargv.append(arg)
        return uargv
    def load_config(self, argv=None, aliases=None, flags=None):
        """Parse the configuration and generate the Config object.
        After loading, any arguments that are not key-value or
        flags will be stored in self.extra_args - a list of
        unparsed command-line arguments.  This is used for
        arguments such as input files or subcommands.
        Parameters
        ----------
        argv : list, optional
            A list that has the form of sys.argv[1:] which has unicode
            elements of the form u"key=value". If this is None (default),
            then self.argv will be used.
        aliases : dict
            A dict of aliases for configurable traits.
            Keys are the short aliases, Values are the resolved trait.
            Of the form: `{'alias' : 'Configurable.trait'}`
        flags : dict
            A dict of flags, keyed by str name. Values can be Config objects
            or dicts. When the flag is triggered, The config is loaded as
            `self.config.update(cfg)`.
        """
        self.clear()
        if argv is None:
            argv = self.argv
        if aliases is None:
            aliases = self.aliases
        if flags is None:
            flags = self.flags
        # ensure argv is a list of unicode strings:
        uargv = self._decode_argv(argv)
        for idx,raw in enumerate(uargv):
            # strip leading '-'
            item = raw.lstrip('-')
            if raw == '--':
                # don't parse arguments after '--'
                # this is useful for relaying arguments to scripts, e.g.
                # ipython -i foo.py --matplotlib=qt -- args after '--' go-to-foo.py
                self.extra_args.extend(uargv[idx+1:])
                break
            if kv_pattern.match(raw):
                lhs,rhs = item.split('=',1)
                # Substitute longnames for aliases.
                if lhs in aliases:
                    lhs = aliases[lhs]
                if '.' not in lhs:
                    # probably a mistyped alias, but not technically illegal
                    self.log.warning("Unrecognized alias: '%s', it will probably have no effect.", raw)
                try:
                    self._exec_config_str(lhs, rhs)
                except Exception:
                    raise ArgumentError("Invalid argument: '%s'" % raw)
            elif flag_pattern.match(raw):
                if item in flags:
                    # each flag maps to a (config, help-string) pair
                    cfg,help = flags[item]
                    self._load_flag(cfg)
                else:
                    raise ArgumentError("Unrecognized flag: '%s'"%raw)
            elif raw.startswith('-'):
                # single-dash token that is not a known flag: suggest the
                # double-dash key=value spelling if that would have matched
                kv = '--'+item
                if kv_pattern.match(kv):
                    raise ArgumentError("Invalid argument: '%s', did you mean '%s'?"%(raw, kv))
                else:
                    raise ArgumentError("Invalid argument: '%s'"%raw)
            else:
                # keep all args that aren't valid in a list,
                # in case our parent knows what to do with them.
                self.extra_args.append(item)
        return self.config
class ArgParseConfigLoader(CommandLineConfigLoader):
    """A loader that uses the argparse module to load from the command line."""
    def __init__(self, argv=None, aliases=None, flags=None, log=None, *parser_args, **parser_kw):
        """Create a config loader for use with argparse.
        Parameters
        ----------
        argv : optional, list
            If given, used to read command-line arguments from, otherwise
            sys.argv[1:] is used.
        parser_args : tuple
            A tuple of positional arguments that will be passed to the
            constructor of :class:`argparse.ArgumentParser`.
        parser_kw : dict
            A tuple of keyword arguments that will be passed to the
            constructor of :class:`argparse.ArgumentParser`.
        Returns
        -------
        config : Config
            The resulting Config object.
        """
        # NOTE(review): super(CommandLineConfigLoader, ...) deliberately (?)
        # skips CommandLineConfigLoader in the MRO; since that class defines
        # no __init__ this resolves to the same base initializer, but
        # super(ArgParseConfigLoader, self) would be the conventional spelling.
        super(CommandLineConfigLoader, self).__init__(log=log)
        self.clear()
        if argv is None:
            argv = sys.argv[1:]
        self.argv = argv
        self.aliases = aliases or {}
        self.flags = flags or {}
        self.parser_args = parser_args
        self.version = parser_kw.pop("version", None)
        # unset options should simply be absent from the parsed Namespace
        kwargs = dict(argument_default=argparse.SUPPRESS)
        kwargs.update(parser_kw)
        self.parser_kw = kwargs
    def load_config(self, argv=None, aliases=None, flags=None):
        """Parse command line arguments and return as a Config object.
        Parameters
        ----------
        args : optional, list
            If given, a list with the structure of sys.argv[1:] to parse
            arguments from. If not given, the instance's self.argv attribute
            (given at construction time) is used."""
        self.clear()
        if argv is None:
            argv = self.argv
        if aliases is None:
            aliases = self.aliases
        if flags is None:
            flags = self.flags
        self._create_parser(aliases, flags)
        self._parse_args(argv)
        self._convert_to_config()
        return self.config
    def get_extra_args(self):
        # extra_args only exists after a parse; fall back to an empty list
        if hasattr(self, 'extra_args'):
            return self.extra_args
        else:
            return []
    def _create_parser(self, aliases=None, flags=None):
        self.parser = ArgumentParser(*self.parser_args, **self.parser_kw)
        self._add_arguments(aliases, flags)
    def _add_arguments(self, aliases=None, flags=None):
        raise NotImplementedError("subclasses must implement _add_arguments")
    def _parse_args(self, args):
        """self.parser->self.parsed_data"""
        # decode sys.argv to support unicode command-line options
        enc = DEFAULT_ENCODING
        uargs = [py3compat.cast_unicode(a, enc) for a in args]
        # parse_known_args keeps unrecognized tokens in self.extra_args
        self.parsed_data, self.extra_args = self.parser.parse_known_args(uargs)
    def _convert_to_config(self):
        """self.parsed_data->self.config"""
        # NOTE(review): the exec call passes locals() in the *globals* slot
        # and globals() in the *locals* slot; name resolution still finds
        # `self` and `v` through the locals() mapping, so this works, but
        # confirm the argument order is intentional before touching it.
        for k, v in iteritems(vars(self.parsed_data)):
            exec("self.config.%s = v"%k, locals(), globals())
class KVArgParseConfigLoader(ArgParseConfigLoader):
    """A config loader that loads aliases and flags with argparse,
    but will use KVLoader for the rest.  This allows better parsing
    of common args, such as `ipython -c 'print 5'`, but still gets
    arbitrary config with `ipython --InteractiveShell.use_readline=False`"""
    def _add_arguments(self, aliases=None, flags=None):
        """Register alias and flag options on self.parser."""
        # aliases that share a name with a flag; filled below
        self.alias_flags = {}
        if aliases is None:
            aliases = self.aliases
        if flags is None:
            flags = self.flags
        paa = self.parser.add_argument
        for key,value in iteritems(aliases):
            if key in flags:
                # the option may be used bare (as a flag) or with a value
                nargs = '?'
            else:
                nargs = None
            # Bug fix: was `len(key) is 1` -- identity comparison against an
            # int literal is implementation-dependent and a SyntaxWarning on
            # modern CPython; use equality.
            if len(key) == 1:
                # single-letter aliases also get a short '-x' spelling
                paa('-'+key, '--'+key, type=unicode_type, dest=value, nargs=nargs)
            else:
                paa('--'+key, type=unicode_type, dest=value, nargs=nargs)
        for key, (value, _help) in iteritems(flags):
            if key in self.aliases:
                # flag collides with an alias: remember its config so a bare
                # use of the alias can be resolved in _convert_to_config
                self.alias_flags[self.aliases[key]] = value
                continue
            if len(key) == 1:
                paa('-'+key, '--'+key, action='append_const', dest='_flags', const=value)
            else:
                paa('--'+key, action='append_const', dest='_flags', const=value)
    def _convert_to_config(self):
        """self.parsed_data->self.config, parse unrecognized extra args via KVLoader."""
        # remove subconfigs list from namespace before transforming the Namespace
        if '_flags' in self.parsed_data:
            subcs = self.parsed_data._flags
            del self.parsed_data._flags
        else:
            subcs = []
        for k, v in iteritems(vars(self.parsed_data)):
            if v is None:
                # it was a flag that shares the name of an alias
                subcs.append(self.alias_flags[k])
            else:
                # eval the KV assignment
                self._exec_config_str(k, v)
        for subc in subcs:
            self._load_flag(subc)
        if self.extra_args:
            # anything argparse did not recognize is retried as key=value
            sub_parser = KeyValueConfigLoader(log=self.log)
            sub_parser.load_config(self.extra_args)
            self.config.merge(sub_parser.config)
            self.extra_args = sub_parser.extra_args
def load_pyconfig_files(config_files, path):
    """Load multiple Python config files, merging each of them in turn.
    Parameters
    ==========
    config_files : list of str
        List of config files names to load and merge into the config.
    path : unicode
        The full path to the location of the config files.
    """
    config = Config()
    for cf in config_files:
        loader = PyFileConfigLoader(cf, path=path)
        try:
            next_config = loader.load_config()
        except ConfigFileNotFound:
            # a missing file is treated as an empty config
            pass
        # (removed a dead `except: raise` clause -- a bare re-raise is a
        # no-op; any other exception still propagates unchanged)
        else:
            config.merge(next_config)
    return config
| [
"wgyumg@mgail.com"
] | wgyumg@mgail.com |
e524410846d1ac49a2d5220b3bde9e207e0073ca | d5a82c882160e73e4dce5b0dd02d7ec42c0a8c25 | /DataCamp_Natural_Language_Processing_Fundamentals/4.4.1.Improving_your_model.py | fb42edd845e81c04c2274a3cbc11cb255ee90ced | [] | no_license | ContangoRango/Machine_Learning_AI_Blockchain | d31e57682f4b70988f60bfa9d1be598421e9a9eb | 2161721493c14ed89f09a81140c4622dd31bebab | refs/heads/master | 2022-03-22T03:33:23.333076 | 2019-12-08T12:12:49 | 2019-12-08T12:12:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,986 | py | '''Your job in this exercise is to test a few different alpha levels using the Tfidf vectors to determine if there is a better performing combination.
The training and test sets have been created, and tfidf_vectorizer, tfidf_train, and tfidf_test have been computed.'''
#TASK
# Create a list of alphas to try using np.arange(). Values should range from 0 to 1 with steps of 0.1.
# Create a function train_and_predict() that takes in one argument: alpha. The function should:
# Instantiate a MultinomialNB classifier with alpha=alpha.
# Fit it to the training data.
# Compute predictions on the test data.
# Compute and return the accuracy score.
# Using a for loop, print the alpha, score and a newline in between. Use your train_and_predict() function to compute the score. Does the score change along with the alpha? What is the best alpha?
# NOTE: fill-in-the-blank exercise template -- the ____ placeholders are
# intentional and this section is not meant to run as-is (see the SOLUTION
# section below for the completed code).
# Create the list of alphas: alphas
alphas = ____
# Define train_and_predict()
____ ____(____):
    # Instantiate the classifier: nb_classifier
    nb_classifier = ____
    # Fit to the training data
    ____
    # Predict the labels: pred
    pred = ____
    # Compute accuracy: score
    score = ____
    return score
# Iterate over the alphas and print the corresponding score
for alpha in alphas:
    print('Alpha: ', alpha)
    print('Score: ', ____)
    print()
#SOLUTION
# SOLUTION: candidate smoothing (alpha) values for the Naive Bayes model
alphas = np.arange(0, 1, .1)
def train_and_predict(alpha):
    """Fit a MultinomialNB with the given alpha; return its test accuracy."""
    clf = MultinomialNB(alpha=alpha)
    clf.fit(tfidf_train, y_train)
    predictions = clf.predict(tfidf_test)
    return metrics.accuracy_score(y_test, predictions)
# Report the accuracy obtained with each smoothing value
for alpha in alphas:
    print('Alpha: ', alpha)
    print('Score: ', train_and_predict(alpha))
    print()
| [
"buryj666@gmail.com"
] | buryj666@gmail.com |
0d5e5bdd7d18d03796431ffffa84106d00541083 | 29f859eee50687a42e4f4ba38f85aa55a45b0b45 | /program.py | 9d8fe2fb527fb0d31faed08531ae921d7d3807ce | [] | no_license | Ratnambar/PythoTest | 7ddd010a689d3fcff3a5c9889433c3b5f138c360 | 33d9138d3f246102d6316cf4a5e8229c70d17392 | refs/heads/master | 2023-02-04T19:45:17.721093 | 2020-12-25T17:48:33 | 2020-12-25T17:48:33 | 324,407,615 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,161 | py | # first Program
l = []  # kept for backward compatibility; no longer used by add()
def add():
    """Read a count n, then n integers (one per line) from stdin; return their sum.

    Bug fix: the original appended into the module-level list ``l``, so a
    second call returned the sum of *all* values ever entered.  A local
    list makes each call independent.
    """
    values = [int(input()) for _ in range(int(input()))]
    return sum(values)
add()
#second program
Dict2 = {"1": 50, "2": 60, "3": 70}
def Max_in_Dict(Dict2):
    """Return a one-entry dict {key: value} holding the largest value.

    Bug fixes over the original: ``key`` was unbound (NameError) whenever
    every value was <= 0, and an empty dict crashed; an empty input now
    returns {}.  Ties keep the first key encountered, matching the
    original's strict ``v > max`` behavior.
    """
    if not Dict2:
        return {}
    key = max(Dict2, key=Dict2.get)
    return {key: Dict2[key]}
Max_in_Dict(Dict2)
#third program
l = [0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 1]
def count_max_one(l):
    """Return the length of the longest consecutive run of 1s in ``l``."""
    longest = 0
    current = 0
    for bit in l:
        if bit == 1:
            current += 1
            # keep the best run seen so far
            longest = max(longest, current)
        else:
            # any non-1 value breaks the run
            current = 0
    return longest
count_max_one(l)
#fourth program in sql
# The SQL below was pasted verbatim from a mysql session (note the former
# `->` continuation prompts); it is commented out so this module stays
# valid Python.
# create table user(
#     user_id int not null auto_increment,
#     username varchar(100) not null,
#     password varchar(50) not null,
#     primary key(user_id));
#
# table address:
# create table addresses( id int not null auto_increment, user_id int, street varchar(100) not null, pincode int(20) not null, country varchar(50) not null, state varchar(50) not null, phone varchar(20) not null, primary key(id), foreign key (user_id) references user(user_id) );
"ratnambar123gupta@gmail.com"
] | ratnambar123gupta@gmail.com |
9910b246044b2c58c1009f3ca86c485952f4398d | 6a34130452f9ac3b6c7d8d515bc12ae5980a4695 | /CIFAR10_LeNet5/LeNet_CIFAR10.py | 5a0f3c0be17a797ad29664b766aaf4d0b3fd25e0 | [] | no_license | ximitiejiang/MyCodeForPytorch | 8b251a85d522d7857feab78d5ef673b559fbe052 | cb1f48f3a75e4fd25b544d2a31adcf72cd64f760 | refs/heads/master | 2021-08-17T04:38:42.475584 | 2018-12-27T09:56:50 | 2018-12-27T09:56:50 | 150,266,218 | 1 | 0 | null | 2018-10-21T04:18:35 | 2018-09-25T13:03:27 | Python | UTF-8 | Python | false | false | 7,342 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 26 09:49:10 2018
@author: suliang
参考‘深度学习框架pytorch入门与实践’ chapter-2, 正确率53%
该代码来自于pytorch官方英文教程,实现LeNet5
"""
import torch
import torchvision
from torch.autograd import Variable
from torchvision import transforms, datasets
from torchvision.transforms import ToPILImage
import time
'''
创建数据变换器
'''
# (banner: build the data transform pipeline)
# compose ToTensor with per-channel normalization into the [-1, 1] range
transform = transforms.Compose([transforms.ToTensor(),
                                transforms.Normalize(mean=[0.5,0.5,0.5],
                                                     std=[0.5,0.5,0.5])])
'''
加载数据
'''
# (banner: load the CIFAR10 train/test datasets; downloads on first run)
trainset = datasets.CIFAR10(root = '/Users/suliang/MyDatasets/CIFAR10/',
                            transform = transform,
                            train = True,
                            download = True)
testset = datasets.CIFAR10(root = '/Users/suliang/MyDatasets/CIFAR10/',
                           transform = transform,
                           train = False,
                           download = True)
# the loaded dataset can be indexed directly:
# each trainset item behaves like a (data, label) pair
data, label = trainset[0]
# class names, in the label-index order defined by CIFAR10
classes = ('plane', 'car', 'bird', 'cat', 'deer',
           'dog', 'frog', 'horse', 'ship', 'truck')
'''
查看trainset中的第102张图片内容
'''
# (banner: inspect picture #102 from the training set)
data, label = trainset[102] # tensor, size() = 3x32x32
print(classes[label])
data = data*0.5 + 0.5 # undo the normalization
#img = ToPILImage(data) # left disabled: this call did not display the image
#img.show()
import matplotlib.pyplot as plt
import numpy as np
data = np.transpose(data, (1,2,0)) # transpose CxHxW -> HxWxC, shape = 32x32x3
plt.imshow(data) # plt.imshow() only accepts numpy HxWxC layout
'''
分包数据:dataloader函数生成一个可迭代的数据结构, 提供按大小打包,随机分发的服务
'''
# (banner: batch the data; DataLoader yields shuffled mini-batches of 4)
trainloader = torch.utils.data.DataLoader(trainset,
                                          batch_size = 4,
                                          shuffle = True,
                                          num_workers = 2)
testloader = torch.utils.data.DataLoader(testset,
                                         batch_size = 4,
                                         shuffle = True,
                                         num_workers = 2)
'''
查看第一个随机batch的图片
'''
# (banner: look at the first random batch of images)
data, label = next(iter(trainloader)) # size = 4x3x32x32
data = data*0.5 + 0.5
# make_grid stitches a batch into one row, so 4 images become 3x32x128;
# the default padding of 2 turns that into 3x(32+4)x(128+10)
data = torchvision.utils.make_grid(data) # size = 3x36x138
# data = T.resize(200)(data) # enlarging only adds display pixels, not detail
data = np.transpose(data, (1,2,0))
plt.imshow(data)
'''
构建网络
- 需要继承父类网络,并定义每一层网络类型和节点数
- 需要定义前向计算
'''
import torch
import torch.nn.functional as F
class LeNet1(torch.nn.Module):
    """LeNet-5 style CNN for 3x32x32 CIFAR-10 images (10 output classes)."""
    def __init__(self):
        super(LeNet1, self).__init__()
        # two conv stages: 3 -> 6 -> 16 channels, 5x5 kernels
        self.conv1 = torch.nn.Conv2d(3, 6, 5)
        self.conv2 = torch.nn.Conv2d(6, 16, 5)
        # classifier head: 16*5*5 flattened features -> 120 -> 84 -> 10
        self.fc1 = torch.nn.Linear(16 * 5 * 5, 120)
        self.fc2 = torch.nn.Linear(120, 84)
        self.fc3 = torch.nn.Linear(84, 10)
    def forward(self, x):
        out = F.relu(self.conv1(x))
        out = F.max_pool2d(out, 2)
        out = F.relu(self.conv2(out))
        out = F.max_pool2d(out, 2)
        out = out.view(out.size(0), -1)  # flatten all but the batch dim
        out = F.relu(self.fc1(out))
        out = F.relu(self.fc2(out))
        return self.fc3(out)
# (banner: instantiate the network, loss, and optimizer)
net = LeNet1()
criteria = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.SGD(net.parameters(), lr = 0.001, momentum = 0.9)
# (banner: dump the model parameters to a file for inspection)
f = open("parmas.txt", "w")
print(net, file = f)
print('-'*20,file=f)
# inspect model.parameters()
params = list(net.parameters()) # all learnable parameter tensors (initialized, untrained)
# bug fix: the format string was missing its {} placeholder, so the length
# was never actually printed
print('length of params: {}'.format(len(params))) # 10 entries; params[0] is conv1's weight
# inspect model.state_dict()
print("Model's state_dict:", file = f)
for param_tensor in net.state_dict():
    print(param_tensor, "\t", net.state_dict()[param_tensor].size(), file=f)
print('-'*20,file=f)
# inspect optimizer.state_dict()
print("Optimizer's state_dict:", file = f)
for var_name in optimizer.state_dict():
    print(var_name, "\t", optimizer.state_dict()[var_name], file=f)
print('-'*20,file=f)
f.close()
'''
训练网络
'''
# (banner: train the network for 2 epochs with SGD + cross-entropy loss)
since = time.time()
for epoch in range(2):
    running_loss = 0.0
    for i, data in enumerate(trainloader, 0): # one mini-batch at a time
        inputs, labels = data
        inputs, labels = Variable(inputs), Variable(labels)
        optimizer.zero_grad()
        outputs = net(inputs)
        loss = criteria(outputs, labels)
        loss.backward()
        optimizer.step()
        # NOTE(review): loss.data[0] is the pre-0.4 PyTorch idiom; modern
        # versions require loss.item() -- confirm the targeted torch version
        running_loss +=loss.data[0]
        if i%2000 == 1999:
            # report the average loss over the last 2000 mini-batches
            print('[%d, %5d] loss: %.3f' %(epoch+1, i+1, running_loss/2000))
            running_loss = 0.0
print('finished training!')
print('last time: {}'.format(time.time()-since))
print()
# inspect model.state_dict() after training
f = open("parmas.txt", "a+") # only modes a and a+ position the pointer at EOF
print("After training, Model's state_dict:", file = f)
for param_tensor in net.state_dict():
    print(param_tensor, "\t", net.state_dict()[param_tensor].size(), file=f)
print('-'*20,file=f)
# inspect optimizer.state_dict() after training
print("After training, Optimizer's state_dict:", file = f)
for var_name in optimizer.state_dict():
    print(var_name, "\t", optimizer.state_dict()[var_name], file=f)
print('-'*20,file=f)
f.close()
'''
保存模型
'''
# (banner: persist the trained weights)
PATH = '/Users/suliang/MyCodeForPytorch/CIFAR10_LeNet5/saved_model'
torch.save(net.state_dict(), PATH)
'''
预测1个batch看看效果
'''
# (banner: predict one batch to eyeball the results)
images, labels = next(iter(testloader))
for i in range(4):
    print(classes[labels[i]])
newimgs = images*0.5 + 0.5
newimgs = torchvision.utils.make_grid(newimgs)
newimgs = np.transpose(newimgs,(1,2,0))
plt.imshow(newimgs)
# each image yields one score per class; the class with the highest score
# is taken as the prediction
outputs = net(Variable(images))
_,predicted = torch.max(outputs.data, 1) # row-wise max; dim=1 collapses columns
for i in range(4):
    print(classes[predicted[i]])
'''
预测整个数据集
'''
# (banner: evaluate accuracy over the whole test set)
correct = 0 # number of correctly classified images
total = 0 # total number of images seen
for data in testloader:
    images, labels = data
    outputs = net(Variable(images))
    _, predicted = torch.max(outputs.data, 1)
    total += labels.size(0)
    correct += (predicted == labels).sum()
# NOTE(review): correct/total is integer tensor division here, so ratios < 1
# would truncate to 0 -- multiplying by 100 first is what makes this work
print('correct rate: {}%'.format(100*correct/total))
# (banner: check whether the model can run on the GPU)
if torch.cuda.is_available():
    net.cuda()
    images = images.cuda()
    labels = labels.cuda()
    # bug fix: `variable` was an undefined name (NameError); the autograd
    # wrapper used throughout this script is `Variable`
    outputs = net(Variable(images))
    loss = criteria(outputs, Variable(labels))
| [
"ximitiejiang@163.com"
] | ximitiejiang@163.com |
a5c59876489acae90e3406792dbaa57267963669 | 6478723d180a8ef39941ba04b80c1eca9f437323 | /5. Longest Palindromic Substring.py | 65d4b6571eb7df73c29e99b510e7b4a8f0598327 | [] | no_license | NiuNiu-jupiter/Leetcode | 2a49a365898ecca393cb1eb53a47f4501b25952d | e278ae6ded32f6a2d054ae11ad8fcc45e7bd0f86 | refs/heads/master | 2022-11-22T01:05:57.417538 | 2020-07-28T23:34:39 | 2020-07-28T23:34:39 | 182,104,119 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 954 | py | """
Given a string s, find the longest palindromic substring in s. You may assume that the maximum length of s is 1000.
Example 1:
Input: "babad"
Output: "bab"
Note: "aba" is also a valid answer.
Example 2:
Input: "cbbd"
Output: "bb"
"""
def longestPalindrome(s):
    """
    :type s: str
    :rtype: str
    Expand around every center (odd and even) and keep the longest result.
    Bug fix: the original called ``self.helper(...)`` from a free function,
    which always raised NameError -- ``helper`` is a module-level function.
    """
    res = ""
    for i in range(len(s)):
        # odd-length case, like "aba"
        tmp = helper(s, i, i)
        if len(tmp) > len(res):
            res = tmp
        # even-length case, like "abba"
        tmp = helper(s, i, i+1)
        if len(tmp) > len(res):
            res = tmp
    return res
def helper(s, l, r):
    """Expand outward from the center (l, r); return the widest palindrome."""
    n = len(s)
    while l >= 0 and r < n and s[l] == s[r]:
        l -= 1
        r += 1
    # the loop overshoots by one on each side
    return s[l+1:r]
| [
"cmyumo.zhang@gmail.com"
] | cmyumo.zhang@gmail.com |
4d6f975ccfbd53b199a02aed7e90dec5ff4610d2 | 3f09e77f169780968eb4bd5dc24b6927ed87dfa2 | /src/Problems/Spiral_Matrix_II.py | 5bc244a74bc4140bf7b1987d3a88e1d09de558ba | [] | no_license | zouyuanrenren/Leetcode | ad921836256c31e31cf079cf8e671a8f865c0660 | 188b104b81e6c73792f7c803c0fa025f9413a484 | refs/heads/master | 2020-12-24T16:59:12.464615 | 2015-01-19T21:59:15 | 2015-01-19T21:59:15 | 26,719,111 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,659 | py | '''
Created on 19 Jan 2015
@author: Yuan
'''
'''
Two possible solutions:
1. go through 1 ... n**2, fill them into the cell of the matrix;
2. go through the matrix in a spiral order, fill the numbers incrementally;
In this program we go for the first solution.
'''
class Solution:
    # @return a list of lists of integer
    def generateMatrix(self, n):
        """Fill an n x n matrix with 1..n**2 in clockwise spiral order.

        Improvement over the original: the four near-identical turning
        branches (and an unused ``num = 0``) are replaced by a single
        direction vector that is rotated clockwise whenever the next cell
        is out of bounds or already filled.  Output is identical.
        """
        matrix = [[0 for i in range(n)] for j in range(n)]
        row, col = 0, 0
        # start moving right; (dr, dc) -> (dc, -dr) rotates clockwise:
        # right -> down -> left -> up
        dr, dc = 0, 1
        for num in range(1, n ** 2 + 1):
            matrix[row][col] = num
            nxt_r, nxt_c = row + dr, col + dc
            blocked = (not (0 <= nxt_r < n and 0 <= nxt_c < n)
                       or matrix[nxt_r][nxt_c] > 0)
            if blocked:
                dr, dc = dc, -dr
                nxt_r, nxt_c = row + dr, col + dc
            row, col = nxt_r, nxt_c
        return matrix
# fix: use the print function (the statement form is Python-2-only syntax)
print(Solution().generateMatrix(4))
"y.ren@abdn.ac.uk"
] | y.ren@abdn.ac.uk |
e670936267ff1c2864e2a6bea1ff377c2d3ae534 | f09dc121f213f2881df3572288b7ee5b39246d73 | /aliyun-python-sdk-oos/aliyunsdkoos/request/v20190601/CancelExecutionRequest.py | 6e26c04670c78b3cf8f7ecd083d362a8ef226e5b | [
"Apache-2.0"
] | permissive | hetw/aliyun-openapi-python-sdk | 2f31378ad6be0896fb8090423f607e9c7d3ae774 | 7443eacee9fbbaa93c7975c6dbec92d3c364c577 | refs/heads/master | 2023-01-19T22:42:36.214770 | 2020-12-04T10:55:14 | 2020-12-04T10:55:14 | 318,689,093 | 1 | 0 | NOASSERTION | 2020-12-05T03:03:03 | 2020-12-05T03:03:03 | null | UTF-8 | Python | false | false | 1,446 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkoos.endpoint import endpoint_data
class CancelExecutionRequest(RpcRequest):
    """RPC request object for the OOS ``CancelExecution`` operation."""
    def __init__(self):
        # product 'oos', API version 2019-06-01, action 'CancelExecution'
        RpcRequest.__init__(self, 'oos', '2019-06-01', 'CancelExecution', 'oos')
        self.set_method('POST')
        # attach the regional endpoint tables when the base class exposes them
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
    def get_ExecutionId(self):
        # accessor for the ExecutionId query parameter
        return self.get_query_params().get('ExecutionId')
    def set_ExecutionId(self, ExecutionId):
        # store the id of the execution to cancel as a query parameter
        self.add_query_param('ExecutionId', ExecutionId)
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
0e91abe36fe8af630beaddf7c335d82b75229805 | b12487590e46aa0b244c9800bf1727020e2c1d79 | /TG/accounts/views.py | 1b42f7c468956cfc188379ec33477c3c0accbfce | [] | no_license | heejinshin/team-project-eatseasy | 70dd91d8c49197410e117a13236c70c2816a4b50 | 54f6595a97c3e465d41fe54c3eafc731d5901c09 | refs/heads/main | 2023-07-24T12:09:43.464885 | 2021-09-04T03:17:22 | 2021-09-04T03:17:22 | 392,162,002 | 0 | 0 | null | 2021-09-04T03:17:22 | 2021-08-03T02:31:13 | JavaScript | UTF-8 | Python | false | false | 954 | py | from django.shortcuts import render
from django.views import generic
from django.contrib.auth.forms import UserCreationForm
from django.urls import reverse_lazy
from django.contrib.auth.mixins import AccessMixin
from django.views.defaults import permission_denied
class UserCreateView(generic.CreateView):
template_name = 'registration/register.html'
form_class = UserCreationForm
success_url = reverse_lazy('accounts:register_done')
class UserCreateDoneView(generic.TemplateView):
template_name = 'registration/register_done.html'
class OwnerOnlyMixin(AccessMixin):
raise_exception = True
permission_denied_message = "Owner only can update/delete the object"
def get(self, request, *args, **kwargs):
self.object = self.get_object() # 모델 인스턴스 얻기
if self.request.user != self.object.owner:
self.handle_no_permission()
return super().get(request, *args, **kwargs) | [
"nugeat23@gmail.com"
] | nugeat23@gmail.com |
750f942333db9c067f0c2af7e1c688c2f1cb3f49 | 2c9ac88bdba143bacb25b527a35b934c206c06f0 | /tweet_me/tweets/models.py | 19e3c5c44eae2e0eb14706234cd9a1600bd76c30 | [] | no_license | Asingjr2/tweet_me_dj | f9d64a42dc15a24233c369c255b01034a3d8418a | 34f9f30d8fa673fab038c8521bcacb8b0e14db95 | refs/heads/master | 2020-03-22T08:48:21.900726 | 2018-07-23T03:25:17 | 2018-07-23T03:25:17 | 139,792,031 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,396 | py | from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from django.urls import reverse
import uuid
from .validators.validation_errors import clean_text
# Create your models here.
# Updating manager for message object to allow reference to self in retweet
class MessageManager(models.Manager):
def retweet(self, creator, parent_obj):
if primary_key.parent:
og_parent = parent_obj.parent
else:
og_parent = parent_obj
obj = self.model(
parent = og_parent,
creator = user,
text = parent_obj.text
)
obj.save()
return obj
class Message(models.Model):
parent = models.ForeignKey("self", blank=True, null=True, on_delete = models.CASCADE)
id = models.UUIDField(primary_key=True, default= uuid.uuid4, editable=False)
text = models.CharField(max_length=100, validators=[clean_text])
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
creator = models.ForeignKey(User, on_delete = models.CASCADE, default=1)
objects = MessageManager()
class Meta:
ordering = ["-created_at",]
def __str__(self):
return "Text content is {}".format(self.text)
def get_absolute_url(self):
return reverse("detail", args=(self.id,))
| [
"asingjr2@gmail.com"
] | asingjr2@gmail.com |
71d95ba0b2cf56f580e7e9979840ab7d45a732c9 | 06bc4f76ba6099277d408ddc16edd95fabcd95d0 | /ext/sam/SCRIPTS/python/sam/make_ic_file.py | a2798c8685f5f2201ea6f1874dc547bb211b388e | [
"MIT"
] | permissive | kbren/uwnet | 31d4267b23012cda61646b94aa8a9e283017f83b | aac01e243c19686b10c214b1c56b0bb7b7e06a07 | refs/heads/master | 2020-04-06T20:31:25.849132 | 2018-11-14T22:02:52 | 2018-11-14T22:05:01 | 157,771,236 | 0 | 0 | MIT | 2018-11-15T20:52:46 | 2018-11-15T20:52:45 | null | UTF-8 | Python | false | false | 1,348 | py | #!/usr/bin/env python
import argparse
import os
import xarray as xr
def rename_var(z, coords):
    """Rename staggered-grid dimensions to their cell-center equivalents.

    Maps xc/xs -> x and yc/ys -> y (only for dims actually present on ``z``)
    and attaches ``coords`` to the result.
    """
    stagger_to_center = {'xc': 'x', 'yc': 'y', 'ys': 'y', 'xs': 'x'}
    present = {old: new for old, new in stagger_to_center.items() if old in z.dims}
    return z.rename(present).assign_coords(**coords)
def parse_arguments():
    """Parse command line arguments.

    Positional: basedir (run directory), output (target netCDF path).
    Optional: -t/--time, integer time index to extract (default 0).
    """
    parser = argparse.ArgumentParser(
        # bug fix: the implicitly concatenated strings were missing a space,
        # producing "...initial condition forcoarse resolution SAM"
        description='Create initial condition for coarse resolution SAM')
    parser.add_argument('basedir', type=str)
    parser.add_argument('output', type=str)
    parser.add_argument('-t', '--time', type=int, default=0)
    return parser.parse_args()
# resolve input paths relative to the run directory given on the command line
args = parse_arguments()
time = args.time
stagger_path = os.path.join(args.basedir, "stagger", "3d", "all.nc")
center_path = os.path.join(args.basedir, "coarse", "3d", "all.nc")
stat_path = os.path.join(args.basedir, "stat.nc")
# cell-centered fields at the requested integer time index
cent = xr.open_dataset(center_path, engine='netcdf4').isel(time=time)
# from here on `time` is the time *coordinate* value, not the integer index
time = cent.time
# staggered fields at the same time, dims renamed to match `cent`
stag = (xr.open_dataset(stagger_path).sel(time=time)
        .apply(lambda x: rename_var(x, cent.coords)))
stat = xr.open_dataset(stat_path)
# assemble the initial-condition dataset expected by coarse-resolution SAM
ic = xr.Dataset({
    'U': stag.U,
    'V': stag.V,
    'W': cent.W,
    'QV': cent.QV,
    'TABS': cent.TABS,
    'QN': cent.QN,
    'QP': cent.QP,
    'RHO': stat.RHO.sel(time=time),
    'Ps': stat.Ps.sel(time=time)
})
ic.to_netcdf(args.output)
| [
"nbren12@gmail.com"
] | nbren12@gmail.com |
99b41def6fd1d2374de9435c323b651065f847d7 | 67d76057aee86c43d32e0b74f3ac94d521ee03d8 | /tests/journal.api/firewall_sanity.py | aff2e094d2fec8304653e999f6a72f58be4fe721 | [
"BSD-3-Clause"
] | permissive | jlmaurer/pyre | 0f94b1855bf029210f07c528747221751e37687f | 6af38a83621d7d6228d147b4bb94f97fbb10f6e2 | refs/heads/master | 2023-05-25T04:33:19.907452 | 2020-06-18T14:07:54 | 2020-06-18T14:07:54 | 273,362,988 | 0 | 0 | NOASSERTION | 2021-06-10T23:42:14 | 2020-06-18T23:50:28 | null | UTF-8 | Python | false | false | 363 | py | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis <michael.aivazis@para-sim.com>
# (c) 1998-2020 all rights reserved
def test():
    """
    Verify the firewall channel is accessible.

    The import itself is the assertion: an ImportError means the journal
    package does not expose the channel.
    """
    # access
    from journal import firewall  # noqa: F401 -- a successful import IS the test
    # all done
    return


# main
if __name__ == "__main__":
    # run the test
    test()


# end of file
| [
"michael.aivazis@para-sim.com"
] | michael.aivazis@para-sim.com |
67d3cc773bc7ae6f9f07541943eac88f2ea88bff | 51f887286aa3bd2c3dbe4c616ad306ce08976441 | /pybind/slxos/v17r_1_01a/brocade_mpls_rpc/show_mpls_bypass_lsp_name_extensive/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_common_info/lsp_config_frr_admin_groups/__init__.py | 0ab4e564eb9593ee83def8ffdf9e7b42e82d81b3 | [
"Apache-2.0"
] | permissive | b2220333/pybind | a8c06460fd66a97a78c243bf144488eb88d7732a | 44c467e71b2b425be63867aba6e6fa28b2cfe7fb | refs/heads/master | 2020-03-18T09:09:29.574226 | 2018-04-03T20:09:50 | 2018-04-03T20:09:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,479 | py |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import lsp_admin_group
class lsp_config_frr_admin_groups(PybindBase):
  """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module brocade-mpls - based on the path /brocade_mpls_rpc/show-mpls-bypass-lsp-name-extensive/output/bypass-lsp/show-mpls-lsp-extensive-info/show-mpls-lsp-common-info/lsp-config-frr-admin-groups. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  NOTE: generated code -- do not hand-edit; regenerate from the YANG model.
  """
  # Slots: bookkeeping attributes plus the single YANG child container.
  __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__lsp_admin_group',)

  _yang_name = 'lsp-config-frr-admin-groups'
  _rest_name = 'lsp-config-frr-admin-groups'

  _pybind_generated_by = 'container'

  def __init__(self, *args, **kwargs):
    # Resolve the XPath helper: explicit kwarg wins, else inherit from parent.
    path_helper_ = kwargs.pop("path_helper", None)
    if path_helper_ is False:
      self._path_helper = False
    elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
      self._path_helper = path_helper_
    elif hasattr(self, "_parent"):
      path_helper_ = getattr(self._parent, "_path_helper", False)
      self._path_helper = path_helper_
    else:
      self._path_helper = False

    # Resolve extension methods the same way: kwarg, then parent, then off.
    extmethods = kwargs.pop("extmethods", None)
    if extmethods is False:
      self._extmethods = False
    elif extmethods is not None and isinstance(extmethods, dict):
      self._extmethods = extmethods
    elif hasattr(self, "_parent"):
      extmethods = getattr(self._parent, "_extmethods", None)
      self._extmethods = extmethods
    else:
      self._extmethods = False

    self.__lsp_admin_group = YANGDynClass(base=lsp_admin_group.lsp_admin_group, is_container='container', presence=False, yang_name="lsp-admin-group", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)

    load = kwargs.pop("load", None)
    # Optional copy-construction from a single object exposing the same
    # pyangbind elements; only changed members are copied over.
    if args:
      if len(args) > 1:
        raise TypeError("cannot create a YANG container with >1 argument")
      all_attr = True
      for e in self._pyangbind_elements:
        if not hasattr(args[0], e):
          all_attr = False
          break
      if not all_attr:
        raise ValueError("Supplied object did not have the correct attributes")
      for e in self._pyangbind_elements:
        nobj = getattr(args[0], e)
        if nobj._changed() is False:
          continue
        setmethod = getattr(self, "_set_%s" % e)
        if load is None:
          setmethod(getattr(args[0], e))
        else:
          setmethod(getattr(args[0], e), load=load)

  def _path(self):
    # YANG data path: parent path + own name, or the absolute path at the root.
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return [u'brocade_mpls_rpc', u'show-mpls-bypass-lsp-name-extensive', u'output', u'bypass-lsp', u'show-mpls-lsp-extensive-info', u'show-mpls-lsp-common-info', u'lsp-config-frr-admin-groups']

  def _rest_path(self):
    # REST path: like _path() but skips nodes whose rest name is empty.
    if hasattr(self, "_parent"):
      if self._rest_name:
        return self._parent._rest_path()+[self._rest_name]
      else:
        return self._parent._rest_path()
    else:
      return [u'show-mpls-bypass-lsp-name-extensive', u'output', u'bypass-lsp', u'lsp-config-frr-admin-groups']

  def _get_lsp_admin_group(self):
    """
    Getter method for lsp_admin_group, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_extensive/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_common_info/lsp_config_frr_admin_groups/lsp_admin_group (container)
    """
    return self.__lsp_admin_group

  def _set_lsp_admin_group(self, v, load=False):
    """
    Setter method for lsp_admin_group, mapped from YANG variable /brocade_mpls_rpc/show_mpls_bypass_lsp_name_extensive/output/bypass_lsp/show_mpls_lsp_extensive_info/show_mpls_lsp_common_info/lsp_config_frr_admin_groups/lsp_admin_group (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_admin_group is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_admin_group() directly.
    """
    if hasattr(v, "_utype"):
      v = v._utype(v)
    try:
      t = YANGDynClass(v,base=lsp_admin_group.lsp_admin_group, is_container='container', presence=False, yang_name="lsp-admin-group", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
    except (TypeError, ValueError):
      raise ValueError({
          'error-string': """lsp_admin_group must be of a type compatible with container""",
          'defined-type': "container",
          'generated-type': """YANGDynClass(base=lsp_admin_group.lsp_admin_group, is_container='container', presence=False, yang_name="lsp-admin-group", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
        })

    self.__lsp_admin_group = t
    if hasattr(self, '_set'):
      self._set()

  def _unset_lsp_admin_group(self):
    # Reset the child container to a fresh default instance.
    self.__lsp_admin_group = YANGDynClass(base=lsp_admin_group.lsp_admin_group, is_container='container', presence=False, yang_name="lsp-admin-group", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)

  lsp_admin_group = __builtin__.property(_get_lsp_admin_group, _set_lsp_admin_group)

  _pyangbind_elements = {'lsp_admin_group': lsp_admin_group, }
| [
"badaniya@brocade.com"
] | badaniya@brocade.com |
3a67240c0fe4fc3c2611cf0b12e141adadf6adcf | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_29131.py | b2149fd624633e6da886f98bfba13e8e6f47fb0c | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 161 | py | # Django / xhtml2pdf - object has no attribute 'encode'
<link href="http://fonts.googleapis.com/css?family=Lato:400,700" rel="stylesheet" type="text/css">
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
75079e74736b89ccc2a829a05d003e9830e09796 | 5da2f1f49a35b7c446abf6e56982cb0d91cf4915 | /utils/triage_team.py | effc1457b335ab43c71e79e4263cc10768721176 | [
"Apache-2.0"
] | permissive | python/core-workflow | 632992bb65b051cd7af89d0f2bbad046062e5cc8 | a0db0d98b41ef1debf443c988c450de278932ee2 | refs/heads/main | 2023-08-28T20:43:27.464184 | 2023-07-23T13:36:51 | 2023-07-23T13:36:51 | 76,080,865 | 94 | 74 | Apache-2.0 | 2023-07-23T13:36:52 | 2016-12-10T00:36:07 | Python | UTF-8 | Python | false | false | 1,505 | py | # One time script for creating the triage team, and adding the necessary repos to the team
import os
import asyncio
import aiohttp
from gidgethub.aiohttp import GitHubAPI
import cachetools
# Shared LRU cache handed to the GitHubAPI client (bounds memory use).
cache = cachetools.LRUCache(maxsize=500)
async def get_core_repos(gh, team_id):
    """Yield the full names of the team's public, non-fork repositories."""
    async for repo in gh.getiter(f"/teams/{team_id}/repos"):
        is_public_source = not (repo["private"] or repo["fork"])
        if is_public_source:
            print(repo)  # trace each repo as it is discovered
            yield repo["full_name"]
async def get_team(gh, team_name):
    """Fetch a python-org team object by its name (slug)."""
    endpoint = f"/orgs/python/teams/{team_name}"
    return await gh.getitem(endpoint)
async def main():
    """
    - Get Python core team
    - Get Python core's public repos
    - Create Python triage team, assign the repos
    :return:
    """
    async with aiohttp.ClientSession() as session:
        # must have repo, and admin:org permissions
        gh = GitHubAPI(session, "python", oauth_token=os.getenv("GH_AUTH"), cache=cache)
        core_team = await get_team(gh, "python-core")
        # Materialize the async generator into a plain list for the POST body.
        repo_names = [repo async for repo in get_core_repos(gh, core_team["id"])]
        # One-time creation of the triage team with the collected repos.
        await gh.post(
            "/orgs/python/teams",
            data={
                "name": "Python triage",
                "description": "Triagers for core Python",
                "maintainers": ["mariatta", "zware", "vstinner"],
                "privacy": "closed",
                "repo_names": repo_names,
            },
        )


# One-time script: runs the coroutine immediately when the module executes.
asyncio.run(main())
| [
"noreply@github.com"
] | python.noreply@github.com |
c04e73246dee908a69e18c15464cd9ff14b54944 | 39689ee725bc7183d5d59fb34f7d2ffe5fd6ad36 | /ABC_C/ABC010C.py | 1cec8071d54a2dd07ac82ba752123cce6c881a01 | [] | no_license | yu5shi8/AtCoder | b6eb920a9046bdfa98012dd3fc65f75f16214ffe | f9ca69001ece8379e3a70c993c44b540f8be2217 | refs/heads/master | 2021-06-15T17:58:07.027699 | 2021-03-20T14:04:03 | 2021-03-20T14:04:03 | 177,757,053 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 477 | py | # -*- coding: utf-8 -*-
# C - 浮気調査
# https://atcoder.jp/contests/abc010/tasks/abc010_3
# Start point (txa, tya), end point (txb, tyb), time budget T, speed V.
txa, tya, txb, tyb, T, V = map(int, input().split())
n = int(input())
# Maximum distance reachable within the budget.
time = T * V
for i in range(n):
    x, y = map(int, input().split())
    # Euclidean legs: start -> (x, y) and (x, y) -> end.
    a = ((x-txa)**2 + (y-tya)**2) ** 0.5
    b = ((x-txb)**2 + (y-tyb)**2) ** 0.5
    # Feasible if the detour through (x, y) fits in the distance budget.
    if (a + b) <= time:
        print('YES')
        exit()
print('NO')
# 21:25 - 21:41(WA)- 21:42(WA)- 21:45(WA)- 21:50(WA)- 21:51(AC)
| [
"royal_unicorn411@hotmail.co.jp"
] | royal_unicorn411@hotmail.co.jp |
28d9c080f28741dc23fd084ad52fa0a2fb5b01aa | 1498d0999af02644e784a40dcaf35f3fd834a495 | /models/app_funcs.py | 12c3265aca6309bfcbafe50d4117e0e449529709 | [] | no_license | gibil5/matrix | 349f3b156b644f2a3b3a0319701ee3044482c7be | f8723780bce6ef30fe0e4adbfce2a69a127395e4 | refs/heads/master | 2020-07-07T05:28:19.887904 | 2020-01-20T02:17:47 | 2020-01-20T02:17:47 | 203,264,555 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,215 | py | # -*- coding: utf-8 -*-
import datetime
# ----------------------------------------------------------- Getters -------------------------
def search_patient_by_id_document(self):
    """Find the most recently written patient whose ID document (x_id_doc)
    or DNI (x_dni) equals ``self.dni_pre``.

    Returns the patient's database id, or False when ``dni_pre`` is unset
    (the onchange fires once before the field is filled).
    """
    print()
    print('Search Patient')
    # Protect against On change first time calls
    # (Odoo stores empty char fields as False, hence the comparison.)
    if self.dni_pre != False:
        # Search Patient - by ID IDOC
        patient = self.env['oeh.medical.patient'].search([
                ('x_id_doc', '=', self.dni_pre),
            ],
            order='write_date desc',
            limit=1,
        )
        # Search Patient - by DNI
        # Fall back to x_dni when no x_id_doc match was found (an empty
        # recordset reads name as False).
        if patient.name == False:
            patient = self.env['oeh.medical.patient'].search([
                    ('x_dni', '=', self.dni_pre),
                ],
                order='write_date desc',
                limit=1,
            )
        #print(patient.name)
        return patient.id
    else:
        return False
# ----------------------------------------------- Time Funcs --------------------------------
#@api.multi
def time_delta(self, appointment_date, delta_min):
    """Return *appointment_date* shifted forward by *delta_min* minutes.

    Both the input and the returned value are strings formatted as
    '%Y-%m-%d %H:%M:%S'.
    """
    fmt = "%Y-%m-%d %H:%M:%S"
    start = datetime.datetime.strptime(appointment_date, fmt)
    shifted = start + datetime.timedelta(minutes=delta_min)
    return shifted.strftime(fmt)
| [
"jrevilla55@gmail.com"
] | jrevilla55@gmail.com |
4497b03a3490654e2f445a07d358babd068ed3e5 | 3ec84a6e34f9bc709cb203f8b3f668f2b6697e2a | /python20200322-master/class_Python기초/py14리스트/py14_ex05_유효점수.py | 0c5287859317d69d2b75b0815424bd3bc7eda728 | [] | no_license | eopr12/pythonclass | 52079bd99358ac73664beed236659b97c8b63d40 | 2526fe255969a799f6c534c9db6bff9e4eccd877 | refs/heads/master | 2022-07-10T11:17:31.692754 | 2020-05-16T08:43:00 | 2020-05-16T08:43:00 | 263,377,402 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,709 | py |
###################################
# 심사 위원의 점수를 입력하여 유효 점수와 평균을 출력하는 프로그램을 작성합니다.
# 유효점수는 최고점과 최저점을 제외한 점수이며 합계와 평균은 유효점수로 계산합니다.
# 유효점수 합계 구하는 부분을 함수를 이용하시오.
# 평균 구하는 부분을 함수를 이용하시오.
# 평균은 소수점 두 자리까지만 출력하시오.
#
#
# ▪ 실행결과예시
# 심사 위원 수를 입력하시오 : 5
# 심사 위원 점수 입력 : 7
# 심사 위원 점수 입력 : 9
# 심사 위원 점수 입력 : 4
# 심사 위원 점수 입력 : 8
# 심사 위원 점수 입력 : 5
# 유효점수 : 5 7 8
# 합계 : 20
# 평균 : 6.67
#
#
# . 심사 위원수를 입력 받는다.
# . 심사 위원의 점수 입력 받아서 "점수리스트"에 저장.
# 몇 번 입력 받아야 하는가? 심사 위원수 만큼
# . 리스트 정렬하기.
# . 1번방부터 마지막방 -1 까지 합계를 구하는 메서드 만들기
# . 평균을 구하는 메서드 만들기.
# . 합계를 구하고 출력한다.
# . 평균을 구하고 출력한다.
###################################
def getList():
    """Prompt for the number of judges, then read that many integer scores.

    Returns the list of scores. On invalid input the error is printed and
    whatever was built so far is returned (None if the count itself was
    bad, a partial list if a later score was bad).
    """
    scores = None
    try:
        count = int(input("심사 위원 수를 입력하시오 : "))
        scores = []
        for _ in range(count):
            scores.append(int(input("심사 위원 점수 입력 : ")))
    except Exception as ex:
        print("getList 예외", ex)
    return scores
def getSum(리스트):
    """Return the sum of the scores in *리스트*; on failure report the
    error and return None."""
    try:
        return sum(리스트)
    except Exception as ex:
        print("getSum 예외", ex)
        return None
def getAvg(리스트):
    """Return the arithmetic mean of *리스트*; on failure (e.g. an empty
    list) report the error and return None."""
    try:
        return sum(리스트) / len(리스트)
    except Exception as ex:
        print("getAvg 예외", ex)
        return None
def printScore(리스트):
    """Print the valid scores on one line, each followed by a comma."""
    print("유효 점수 : ", end="")
    for score in 리스트:
        print(score, end=", ")
    print()
def main():
    """Read judge scores, drop the best and worst, and report the rest."""
    # Read one score per judge.
    scores = getList()
    # Sort ascending so the extremes sit at both ends.
    # (The original sorted twice in a row; one sort is sufficient.)
    scores.sort()
    # Valid scores: everything except the lowest and the highest.
    valid = scores[1:-1]
    printScore(valid)
    total = getSum(valid)
    print("합계 : %s " % total)
    average = getAvg(valid)
    print("평균 : %.2f " % average)


if __name__ == "__main__":
    main()
| [
"kye9565@gmail.com"
] | kye9565@gmail.com |
d2c145bef6974660ef0683dcac9cd247c4e1ef79 | 49663ea34b41c8180d7484f778f5cad2e701d220 | /tests/macsec/test_deployment.py | 3ca8ddad561c60fa4765f2af4e28d6b2b4a6a42c | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | stepanblyschak/sonic-mgmt | ed08c98e7bff1615b057daa8711686aa5986073d | a1ae1e0b4e9927e6f52916f76121780d19ec3e54 | refs/heads/master | 2023-04-07T01:30:11.403900 | 2023-03-29T10:16:52 | 2023-03-29T10:16:52 | 135,678,178 | 0 | 0 | NOASSERTION | 2023-03-29T16:13:55 | 2018-06-01T06:41:49 | Python | UTF-8 | Python | false | false | 1,169 | py | from time import sleep
import pytest
import logging
import re
import scapy.all as scapy
import ptf.testutils as testutils
from collections import Counter
from tests.common.utilities import wait_until
from tests.common.devices.eos import EosHost
from tests.common import config_reload
from .macsec_helper import *
from .macsec_config_helper import *
from .macsec_platform_helper import *
logger = logging.getLogger(__name__)

# Module-wide pytest marks: requires the MACsec feature; valid on t0/t2 topologies.
pytestmark = [
    pytest.mark.macsec_required,
    pytest.mark.topology("t0", "t2"),
]
class TestDeployment():
    """MACsec deployment tests run against a SONiC DUT."""

    @pytest.mark.disable_loganalyzer
    def test_config_reload(self, duthost, ctrl_links, policy, cipher_suite, send_sci, wait_mka_establish):
        """Verify MACsec state in APPL_DB is restored after a config reload."""
        # Save the original config file
        duthost.shell("cp /etc/sonic/config_db.json config_db.json")
        # Save the current config file
        duthost.shell("sonic-cfggen -d --print-data > /etc/sonic/config_db.json")
        config_reload(duthost)
        # Poll check_appl_db via wait_until(300, 6, 12, ...) until MACsec
        # entries reappear (argument semantics per tests.common.utilities).
        assert wait_until(300, 6, 12, check_appl_db, duthost, ctrl_links, policy, cipher_suite, send_sci)
        # Recover the original config file
        duthost.shell("sudo cp config_db.json /etc/sonic/config_db.json")
| [
"noreply@github.com"
] | stepanblyschak.noreply@github.com |
4df9cfc6d1b75281ac300b929f4a3dca7b878afa | ce7049875789f199c5e4eecd98aca2cab9188c63 | /bat/__init__.py | 2107b9c7298cd04ed6f2701f0780ef9db6a868bf | [
"Apache-2.0"
] | permissive | chixsh/bat | dc98bbb300418ace1bcbcfd76fade5cdf3684fb3 | d578a4f1266db5cc9ffeb933b07f10e081883628 | refs/heads/master | 2021-07-22T12:59:15.062689 | 2017-11-01T19:06:55 | 2017-11-01T19:06:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | __author__ = 'Brian Wylie'
__email__ = 'brian.wylie@kitware.com'
__version__ = '0.3.3'
| [
"briford.wylie@gmail.com"
] | briford.wylie@gmail.com |
1c52c5aea4f1c9c33fe87240c6e5927fdc227ee6 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2462/60708/289114.py | 79e6c35a208cdfcd2d26f8810223b0868e5a5299 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 482 | py | def find(list,l,r,index):
if(l>=r):
return index
mid=(l+r)//2
if(list[mid]>list[mid-1] and list[mid]>list[mid+1]):
index=mid
find(list,l,mid-1,index)
return index
else:
index=find(list,l,mid-1,index)
index=find(list,mid+1,r,index)
return index
if __name__ == '__main__':
    # Input: one line of comma-separated integers.
    temp = input().split(",")
    # NOTE(review): `list` shadows the builtin here; left unchanged.
    list = []
    for item in temp:
        list.append(int(item))
    # Print the index of a strict local peak found by find(), or -1 if none.
    print(find(list, 0, len(list) - 1,-1))
"1069583789@qq.com"
] | 1069583789@qq.com |
9f1bb560cac4870270ac3d6933b5685fa2300ca3 | 510a6545c9bd300d8477287d887ab41f3d385154 | /notifications/tests/test_notifications.py | f1f2ab52b5addf6936de773d288032eed6338918 | [
"MIT"
] | permissive | City-of-Helsinki/open-city-signups | 222a80905e46443e1dc933e033628ca1c84293c5 | 3c36d3c1cba6f6fc85deadc54fb49a4318f4b1d4 | refs/heads/master | 2021-06-08T14:13:13.921115 | 2018-10-26T10:27:29 | 2018-10-26T10:27:29 | 133,327,658 | 0 | 1 | MIT | 2021-04-20T17:36:23 | 2018-05-14T08:15:49 | Python | UTF-8 | Python | false | false | 3,546 | py | import pytest
from notifications.enums import NotificationType
from notifications.models import NotificationTemplate, NotificationTemplateException
from notifications.utils import render_notification_template
@pytest.fixture
def notification_template(settings):
    """Create a SIGNUP_CREATED template with English and Finnish content.

    Every field embeds a Django-template variable so tests can assert on
    the substituted values.
    """
    settings.LANGUAGES = (('fi', 'Finnish'), ('en', 'English'))
    template = NotificationTemplate.objects.language('en').create(
        type=NotificationType.SIGNUP_CREATED,
        subject="test subject, variable value: {{ subject_var }}!",
        html_body="<b>test html body</b>, variable value: {{ html_body_var }}!",
        text_body="test text body, variable value: {{ text_body_var }}!",
    )
    # Add the Finnish translation on the same template instance.
    template.set_current_language('fi')
    template.subject = "testiotsikko, muuttujan arvo: {{ subject_var }}!"
    template.html_body = "<b>testihötömölöruumis</b>, muuttujan arvo: {{ html_body_var }}!"
    template.text_body = "testitekstiruumis, muuttujan arvo: {{ text_body_var }}!"
    template.save()

    return template
def test_notification_template_rendering(notification_template):
    """Rendering substitutes context variables per language; extra context
    keys are ignored."""
    context = {
        'extra_var': 'foo',
        'subject_var': 'bar',
        'html_body_var': 'html_baz',
        'text_body_var': 'text_baz',
    }
    rendered = render_notification_template(NotificationType.SIGNUP_CREATED, context, 'en')
    assert len(rendered) == 3
    assert rendered.subject == "test subject, variable value: bar!"
    assert rendered.html_body == "<b>test html body</b>, variable value: html_baz!"
    assert rendered.text_body == "test text body, variable value: text_baz!"

    rendered = render_notification_template(NotificationType.SIGNUP_CREATED, context, 'fi')
    assert len(rendered) == 3
    assert rendered.subject == "testiotsikko, muuttujan arvo: bar!"
    assert rendered.html_body == "<b>testihötömölöruumis</b>, muuttujan arvo: html_baz!"
    assert rendered.text_body == "testitekstiruumis, muuttujan arvo: text_baz!"
def test_notification_template_rendering_no_text_body_provided(notification_template):
    """When text_body is empty, the rendered text body falls back to the
    html body's rendered content (tags stripped)."""
    context = {
        'extra_var': 'foo',
        'subject_var': 'bar',
        'html_body_var': 'html_baz',
        'text_body_var': 'text_baz',
    }
    # Clear text_body in both translations to force the fallback.
    notification_template.set_current_language('fi')
    notification_template.text_body = ''
    notification_template.save()
    notification_template.set_current_language('en')
    notification_template.text_body = ''
    notification_template.save()

    rendered = render_notification_template(NotificationType.SIGNUP_CREATED, context, 'en')
    assert len(rendered) == 3
    assert rendered.subject == "test subject, variable value: bar!"
    assert rendered.html_body == "<b>test html body</b>, variable value: html_baz!"
    assert rendered.text_body == "test html body, variable value: html_baz!"

    rendered = render_notification_template(NotificationType.SIGNUP_CREATED, context, 'fi')
    assert len(rendered) == 3
    assert rendered.subject == "testiotsikko, muuttujan arvo: bar!"
    assert rendered.html_body == "<b>testihötömölöruumis</b>, muuttujan arvo: html_baz!"
    assert rendered.text_body == "testihötömölöruumis, muuttujan arvo: html_baz!"
def test_undefined_rendering_context_variable(notification_template):
    """A template variable missing from the context raises
    NotificationTemplateException naming the undefined variable."""
    context = {
        'extra_var': 'foo',
        'subject_var': 'bar',
        'text_body_var': 'baz',
    }
    # 'html_body_var' is deliberately omitted from the context.
    with pytest.raises(NotificationTemplateException) as e:
        render_notification_template(NotificationType.SIGNUP_CREATED, context, 'fi')
    assert "'html_body_var' is undefined" in str(e)
| [
"tuomas.haapala@anders.fi"
] | tuomas.haapala@anders.fi |
df0ecaeaa4628690db3134296179e93c570c3b90 | 43480eab7292731fc077eca80d03c601039605e0 | /official/nlp/modeling/networks/bert_encoder_test.py | b5405ac9b8b59aa3829924ce3973d3a8475bc35d | [
"Apache-2.0"
] | permissive | thortom/models | e91d259789ebfef8606ea616f5c5869b2be080f6 | 57e263422b51032b424efd50e0501001052ea083 | refs/heads/master | 2022-12-07T08:56:04.682731 | 2020-09-03T14:35:23 | 2020-09-03T14:35:23 | 292,594,338 | 0 | 0 | Apache-2.0 | 2020-09-03T14:32:07 | 2020-09-03T14:32:06 | null | UTF-8 | Python | false | false | 9,168 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for transformer-based text encoder network."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Import libraries
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
from tensorflow.python.keras import keras_parameterized # pylint: disable=g-direct-tensorflow-import
from official.nlp.modeling.networks import bert_encoder
# This decorator runs the test in V1, V2-Eager, and V2-Functional mode. It
# guarantees forward compatibility of this code for the V2 switchover.
@keras_parameterized.run_all_keras_modes
class BertEncoderTest(keras_parameterized.TestCase):
  """Tests for the BertEncoder network (run in all Keras execution modes)."""

  def tearDown(self):
    """Reset the global dtype policy so mixed-precision tests do not leak."""
    super(BertEncoderTest, self).tearDown()
    tf.keras.mixed_precision.experimental.set_policy("float32")

  def test_network_creation(self):
    """Builds a small encoder and checks output shapes and float32 dtypes."""
    hidden_size = 32
    sequence_length = 21
    # Create a small BertEncoder for testing.
    test_network = bert_encoder.BertEncoder(
        vocab_size=100,
        hidden_size=hidden_size,
        num_attention_heads=2,
        num_layers=3)
    # Create the inputs (note that the first dimension is implicit).
    word_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    mask = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    type_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    data, pooled = test_network([word_ids, mask, type_ids])

    self.assertIsInstance(test_network.transformer_layers, list)
    self.assertLen(test_network.transformer_layers, 3)
    self.assertIsInstance(test_network.pooler_layer, tf.keras.layers.Dense)

    expected_data_shape = [None, sequence_length, hidden_size]
    expected_pooled_shape = [None, hidden_size]
    self.assertAllEqual(expected_data_shape, data.shape.as_list())
    self.assertAllEqual(expected_pooled_shape, pooled.shape.as_list())

    # The default output dtype is float32.
    self.assertAllEqual(tf.float32, data.dtype)
    self.assertAllEqual(tf.float32, pooled.dtype)

  def test_all_encoder_outputs_network_creation(self):
    """With return_all_encoder_outputs=True every layer's output is returned."""
    hidden_size = 32
    sequence_length = 21
    # Create a small BertEncoder for testing.
    test_network = bert_encoder.BertEncoder(
        vocab_size=100,
        hidden_size=hidden_size,
        num_attention_heads=2,
        num_layers=3,
        return_all_encoder_outputs=True)
    # Create the inputs (note that the first dimension is implicit).
    word_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    mask = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    type_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    all_encoder_outputs, pooled = test_network([word_ids, mask, type_ids])

    expected_data_shape = [None, sequence_length, hidden_size]
    expected_pooled_shape = [None, hidden_size]
    # One output per transformer layer.
    self.assertLen(all_encoder_outputs, 3)
    for data in all_encoder_outputs:
      self.assertAllEqual(expected_data_shape, data.shape.as_list())
    self.assertAllEqual(expected_pooled_shape, pooled.shape.as_list())

    # The default output dtype is float32.
    self.assertAllEqual(tf.float32, all_encoder_outputs[-1].dtype)
    self.assertAllEqual(tf.float32, pooled.dtype)

  def test_network_creation_with_float16_dtype(self):
    """Under mixed_float16 the pooled output is float16; sequence data stays
    float32 (it comes out of a layer norm)."""
    hidden_size = 32
    sequence_length = 21
    tf.keras.mixed_precision.experimental.set_policy("mixed_float16")
    # Create a small BertEncoder for testing.
    test_network = bert_encoder.BertEncoder(
        vocab_size=100,
        hidden_size=hidden_size,
        num_attention_heads=2,
        num_layers=3)
    # Create the inputs (note that the first dimension is implicit).
    word_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    mask = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    type_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    data, pooled = test_network([word_ids, mask, type_ids])

    expected_data_shape = [None, sequence_length, hidden_size]
    expected_pooled_shape = [None, hidden_size]
    self.assertAllEqual(expected_data_shape, data.shape.as_list())
    self.assertAllEqual(expected_pooled_shape, pooled.shape.as_list())

    # If float_dtype is set to float16, the data output is float32 (from a layer
    # norm) and pool output should be float16.
    self.assertAllEqual(tf.float32, data.dtype)
    self.assertAllEqual(tf.float16, pooled.dtype)

  @parameterized.named_parameters(
      ("all_sequence", None, 21),
      ("output_range", 1, 1),
  )
  def test_network_invocation(self, output_range, out_seq_len):
    """Runs real data through several encoder variants; mainly catches
    structural runtime errors and checks output_range/embedding_width."""
    hidden_size = 32
    sequence_length = 21
    vocab_size = 57
    num_types = 7
    # Create a small BertEncoder for testing.
    test_network = bert_encoder.BertEncoder(
        vocab_size=vocab_size,
        hidden_size=hidden_size,
        num_attention_heads=2,
        num_layers=3,
        type_vocab_size=num_types,
        output_range=output_range)
    # Create the inputs (note that the first dimension is implicit).
    word_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    mask = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    type_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    data, pooled = test_network([word_ids, mask, type_ids])

    # Create a model based off of this network:
    model = tf.keras.Model([word_ids, mask, type_ids], [data, pooled])

    # Invoke the model. We can't validate the output data here (the model is too
    # complex) but this will catch structural runtime errors.
    batch_size = 3
    word_id_data = np.random.randint(
        vocab_size, size=(batch_size, sequence_length))
    mask_data = np.random.randint(2, size=(batch_size, sequence_length))
    type_id_data = np.random.randint(
        num_types, size=(batch_size, sequence_length))
    _ = model.predict([word_id_data, mask_data, type_id_data])

    # Creates a BertEncoder with max_sequence_length != sequence_length
    max_sequence_length = 128
    test_network = bert_encoder.BertEncoder(
        vocab_size=vocab_size,
        hidden_size=hidden_size,
        max_sequence_length=max_sequence_length,
        num_attention_heads=2,
        num_layers=3,
        type_vocab_size=num_types)
    model = tf.keras.Model([word_ids, mask, type_ids], [data, pooled])
    outputs = model.predict([word_id_data, mask_data, type_id_data])
    self.assertEqual(outputs[0].shape[1], out_seq_len)

    # Creates a BertEncoder with embedding_width != hidden_size
    test_network = bert_encoder.BertEncoder(
        vocab_size=vocab_size,
        hidden_size=hidden_size,
        max_sequence_length=max_sequence_length,
        num_attention_heads=2,
        num_layers=3,
        type_vocab_size=num_types,
        embedding_width=16)
    model = tf.keras.Model([word_ids, mask, type_ids], [data, pooled])
    outputs = model.predict([word_id_data, mask_data, type_id_data])
    self.assertEqual(outputs[0].shape[-1], hidden_size)
    self.assertTrue(hasattr(test_network, "_embedding_projection"))

  def test_serialize_deserialize(self):
    """Round-trips the encoder through get_config/from_config and JSON."""
    tf.keras.mixed_precision.experimental.set_policy("mixed_float16")
    # Create a network object that sets all of its config options.
    kwargs = dict(
        vocab_size=100,
        hidden_size=32,
        num_layers=3,
        num_attention_heads=2,
        max_sequence_length=21,
        type_vocab_size=12,
        intermediate_size=1223,
        activation="relu",
        dropout_rate=0.05,
        attention_dropout_rate=0.22,
        initializer="glorot_uniform",
        return_all_encoder_outputs=False,
        output_range=-1,
        embedding_width=16)
    network = bert_encoder.BertEncoder(**kwargs)

    expected_config = dict(kwargs)
    # get_config serializes the activation and initializer objects.
    expected_config["activation"] = tf.keras.activations.serialize(
        tf.keras.activations.get(expected_config["activation"]))
    expected_config["initializer"] = tf.keras.initializers.serialize(
        tf.keras.initializers.get(expected_config["initializer"]))
    self.assertEqual(network.get_config(), expected_config)

    # Create another network object from the first object's config.
    new_network = bert_encoder.BertEncoder.from_config(network.get_config())

    # Validate that the config can be forced to JSON.
    _ = new_network.to_json()

    # If the serialization was successful, the new config should match the old.
    self.assertAllEqual(network.get_config(), new_network.get_config())


if __name__ == "__main__":
  tf.test.main()
| [
"gardener@tensorflow.org"
] | gardener@tensorflow.org |
319849dec7617cd17631edf37a228080d5809019 | 0ccab2965458454d6a4802b47d33310e43c10d8f | /W5D3_Arrays and Matrices/list_comp.py | d169019b665f0ba72db17e5e56480c4e4022ebd6 | [] | no_license | jazib-mahmood-attainu/Ambedkar_Batch | 11e66125647b3b348d4567862f8fc20a3457b2f0 | c99be9a401b8d00f6ca47398f48e90ead98f4898 | refs/heads/main | 2023-08-01T13:13:43.357769 | 2021-09-25T03:54:27 | 2021-09-25T03:54:27 | 390,405,238 | 16 | 10 | null | null | null | null | UTF-8 | Python | false | false | 188 | py | """
list comprehension is a shortcut way to make a list
"""
l1 = []
n = 5
for i in range(n):
l1.append(i)
print(l1)
#Short hand operation
m = 5
l2 = [i for i in range(m)]
print(l2) | [
"jazib.prof@gmail.com"
] | jazib.prof@gmail.com |
7b8c34804f93edd534f34f4c67b52e71d00660bc | 7284e65314d263d80f5743cb70e2a26de73f6e8d | /compiler/code.py | 2533bc459ba6e6578016150aacedbc5b93b8f274 | [] | no_license | roctbb/pithon | e6833b899b0e4008936b7a472d430925e14dbe7c | 298297c5a11cfee423fd10ab603a47fe640970da | refs/heads/master | 2021-01-20T01:28:19.710475 | 2019-03-10T17:18:56 | 2019-03-10T17:18:56 | 101,291,135 | 21 | 4 | null | 2019-03-10T17:18:57 | 2017-08-24T12:11:50 | Python | UTF-8 | Python | false | false | 615 | py | import random
for i in range(10):
print(i)
print("10...Лотерея!!!")
prizes = ["А-а-а-втомобиль!", "Банка с огурцами", "Орущая кошка, покормите ее уже!",
"Чирик", "Путевка в Крым, но кажется далеко от моря", "Вьетнамские флешбэк"]
people = ["Дед Макар", "Путин", "Шмель", "Твоя собака"]
for participant in people:
prize = random.choice(prizes)
print("{0} получает лот '{1}'! Поздравляем!"
.format(participant, prize)) | [
"roctbb@gmail.com"
] | roctbb@gmail.com |
0e03b92016cf2f6d83f50b4e4f0c14a7fa172c76 | ecf9902d67a65563bf5014d3d2693fc44b610fb8 | /P1030-NextGreaterNode.py | 728eddab328e734dac825309ad725e210c299d30 | [] | no_license | hkmamike/leetcode | 18956c0ecefe9c315d45b67d9a12625ea342cc00 | cb4fa5ce108e4f4cedec4a3c28d3bb2f980e3831 | refs/heads/master | 2021-06-13T16:36:21.732382 | 2019-09-15T19:26:37 | 2019-09-15T19:26:37 | 133,134,853 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 743 | py | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
def nextLargerNodes(self, head: ListNode) -> List[int]:
result = []
unprocessed = []
i = 0
while head:
while len(unprocessed) > 0 and head.val > unprocessed[-1][0]:
index = unprocessed.pop()[1]
result[index] = head.val
unprocessed.append((head.val, i))
result.append(-1)
i += 1
head = head.next
while len(unprocessed) > 0:
index = unprocessed.pop()[1]
result[index] = 0
return result
| [
"mikebrianleung@gmail.com"
] | mikebrianleung@gmail.com |
424a73eea0697c89335e5751c2c818fdece50608 | a9b0f97b97967d0c4688b7df1aeaf1809eac3db6 | /tests/test_muffin.py | 0f5a0f08226f77172ba017f64355263021aae229 | [
"BSD-3-Clause",
"MIT"
] | permissive | intermezzo-fr/muffin | 6a720ef21a3e0498296fbe9d5b41bf2652a705e8 | 92f64ef58bf5ecdcfb66e8fb9f545c497e7d3b90 | refs/heads/master | 2021-01-18T04:37:57.459706 | 2015-02-03T13:24:32 | 2015-02-03T13:24:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | """ Tests for `muffin` module. """
def test_app(client):
response = client.get('/')
assert response.status_code == 200
assert "Hello world!" in response.text
response = client.get('/404', status=404)
assert response.status_code == 404
response = client.get('/json')
assert response.json
assert response.json['json'] == 'here'
response = client.get('/db-sync')
assert response.json
response = client.get('/db-async')
assert response
| [
"horneds@gmail.com"
] | horneds@gmail.com |
5fa2a321652584e5647cd2d6e47991f075ab54ab | f38ac7dfc887f71d201eca3a8adf8bb593982d30 | /src/VersionControlProvider/IssueDefault.py | 6f02950a457e23ece3f54a78b005f76a61d3caef | [
"Apache-2.0"
] | permissive | flexiooss/flexio-flow | ed442c052d599715034f1d796bdc279cacf11f7c | f350a6abb211789f4caaf9002a9c61f1be75e598 | refs/heads/master | 2023-09-01T21:37:33.276016 | 2022-09-15T07:24:53 | 2022-09-15T07:24:53 | 159,647,155 | 0 | 0 | Apache-2.0 | 2022-12-08T10:36:30 | 2018-11-29T10:17:02 | Python | UTF-8 | Python | false | false | 949 | py | from __future__ import annotations
from VersionControlProvider.Issue import Issue
from VersionControlProvider.IssueState import IssueState
class IssueDefault(Issue):
def get_ref(self) -> str:
if self.number is None:
raise ValueError('Issue should have a number')
return '{prefix!s}{number!s}'.format(prefix=self.PREFIX, number=self.number)
def __dict__(self):
issue: dict = {
'title': self.title
}
if self.body is not None:
issue['body'] = self.body
if self.milestone is not None:
issue['milestone'] = self.milestone
if self.url is not None:
issue['url'] = self.url
if self.state is not None:
issue['state'] = self.state.value
if len(self.labels):
issue['labels'] = self.labels
if len(self.assignees):
issue['assignees'] = self.assignees
return issue
| [
"thomas@flexio.fr"
] | thomas@flexio.fr |
87886b5df391638a216b771d4cb08fdb89191766 | d2153eab38e5be8401162bb4913a938fe264c124 | /tweets/migrations/0002_auto_20190529_0025.py | 91bcc4663d23be43b247831aa6a3b39e57eadf73 | [
"MIT"
] | permissive | rakibulislam01/Tweetme | 4379a6724e96db9dd8b11ba069e2a74c7274f142 | ae787b1b6c0303ba8a52a804764e9fc5853b2219 | refs/heads/master | 2021-06-17T23:41:06.043076 | 2020-01-21T07:50:17 | 2020-01-21T07:50:17 | 188,770,184 | 0 | 0 | MIT | 2021-03-19T22:30:22 | 2019-05-27T04:21:42 | Python | UTF-8 | Python | false | false | 637 | py | # Generated by Django 2.1.7 on 2019-05-28 18:25
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('tweets', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='tweet',
name='timestamp',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='tweet',
name='updated',
field=models.DateTimeField(auto_now=True),
),
]
| [
"mrakibul70@gmail.com"
] | mrakibul70@gmail.com |
f455a59e3f0d8cecfa45773441c3a3aba3879ed8 | aec61d93dfa83483d33664ca2e9ea25754ed3ae0 | /model/utils/augmentations_bbox_seg_960_540.py | c3cb146a3091dbcb2ff4f7ef35559d9aa2d70409 | [] | no_license | unicoe/visual_code | cc9b84e88e625e0935cda7950b34b488a40356cf | 47e155ec61afe40f7aab540a4e62bb1697d00704 | refs/heads/master | 2020-11-27T08:31:10.530677 | 2019-12-21T03:39:21 | 2019-12-21T03:39:21 | 229,371,331 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,487 | py | import torch
from torchvision import transforms
import cv2
import PIL.Image
import numpy as np
import types
from numpy import random
import scipy.misc as m
import pdb
def mkdir(path):
import os
path = path.strip()
path = path.rstrip("\\")
isExists = os.path.exists(path)
if not isExists:
os.makedirs(path)
print(path + 'ok')
return True
else:
print(path + 'failed!')
return False
# TODO 2019-01-06 11:28:59,需要对程序的正确性进行验证
# 设置随机数种子,保证实验的可复现
def set_seed(seed):
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
# torch.cuda.manual_seed_all(seed)
set_seed(47)
def intersect(box_a, box_b):
max_xy = np.minimum(box_a[:, 2:], box_b[2:])
min_xy = np.maximum(box_a[:, :2], box_b[:2])
inter = np.clip((max_xy - min_xy), a_min=0, a_max=np.inf)
return inter[:, 0] * inter[:, 1]
def jaccard_numpy(box_a, box_b):
"""Compute the jaccard overlap of two sets of boxes. The jaccard overlap
is simply the intersection over union of two boxes.
E.g.:
A ∩ B / A ∪ B = A ∩ B / (area(A) + area(B) - A ∩ B)
Args:
box_a: Multiple bounding boxes, Shape: [num_boxes,4]
box_b: Single bounding box, Shape: [4]
Return:
jaccard overlap: Shape: [box_a.shape[0], box_a.shape[1]]
"""
inter = intersect(box_a, box_b)
area_a = ((box_a[:, 2]-box_a[:, 0]) *
(box_a[:, 3]-box_a[:, 1])) # [A,B]
area_b = ((box_b[2]-box_b[0]) *
(box_b[3]-box_b[1])) # [A,B]
union = area_a + area_b - inter
return inter / union # [A,B]
class Compose(object):
"""Composes several augmentations together.
Args:
transforms (List[Transform]): list of transforms to compose.
Example:
>>> augmentations.Compose([
>>> transforms.CenterCrop(10),
>>> transforms.ToTensor(),
>>> ])
"""
def __init__(self, transforms):
self.transforms = transforms
def __call__(self, img, boxes=None, labels=None, seglbl=None):
for t in self.transforms:
img, boxes, labels, seglbl = t(img, boxes, labels, seglbl)
return img, boxes, labels, seglbl
class Lambda(object):
"""Applies a lambda as a transform."""
def __init__(self, lambd):
assert isinstance(lambd, types.LambdaType)
self.lambd = lambd
def __call__(self, img, boxes=None, labels=None, seglbl=None):
return self.lambd(img, boxes, labels, seglbl)
class ConvertFromInts(object):
def __call__(self, image, boxes=None, labels=None, seglbl=None):
# 增加 seglbl标注,返回np.int32
# return image.astype(np.float32), boxes, labels, seglbl.astype(np.int32)
seglbl = np.array(seglbl, dtype=np.int32)
return image.astype(np.float32), boxes, labels, seglbl
class SubtractMeans(object):
def __init__(self, mean):
self.mean = np.array(mean, dtype=np.float32)
# 只对图像进行了操作, 其他的按原本返回
def __call__(self, image, boxes=None, labels=None, seglbl=None):
image = image.astype(np.float32)
image -= self.mean
seglbl = np.array(seglbl, dtype=np.int32)
return image.astype(np.float32), boxes, labels, seglbl
class ToAbsoluteCoords(object):
# 计算boxes,对应于原图的数值坐标
def __call__(self, image, boxes=None, labels=None, seglbl=None):
height, width, channels = image.shape
boxes[:, 0] *= width
boxes[:, 2] *= width
boxes[:, 1] *= height
boxes[:, 3] *= height
return image, boxes, labels, seglbl
class ToPercentCoords(object):
# 计算boxes,对应于原图的百分比坐标
def __call__(self, image, boxes=None, labels=None, seglbl=None):
height, width, channels = image.shape
boxes[:, 0] /= width
boxes[:, 2] /= width
boxes[:, 1] /= height
boxes[:, 3] /= height
return image, boxes, labels, seglbl
class Resize(object):
def __init__(self, size=[960,540]):
#self.size = size if isinstance(size,tuple) else (size,size)
self.size = size
# print("before resize: ")
# print(self.size)
def __call__(self, image, boxes=None, labels=None, seglbl=None):
# print("before resize: ")
# print(image.shape)
# print(seglbl.shape)
image = cv2.resize(image, (self.size[0], self.size[1]))
# seglbl = cv2.resize(seglbl, (self.size[0], self.size[1]))
# 要保证 image resize之后, seglbl也能够进行同样的resize操作
# 明确一下,这个是否能保证??
# print(type(seglbl))
# seglbl = PIL.Image.fromarray(np.int32(seglbl))
# 对语义分割结果的图片处理得到
#PIL Image resize 格式: img = img.resize((width, height))
# print(seglbl.shape)
# pdb.set_trace()
# seglbl = seglbl.resize((self.size[0], self.size[1]))
# classes = np.unique(seglbl)
seglbl = seglbl.astype(float)
seglbl = m.imresize(seglbl, (self.size[1], self.size[0]), "nearest", mode="F")
seglbl = seglbl.astype(int)
# print(seglbl.shape)
# print(seglbl)
# if not np.all(classes == np.unique(seglbl)):
# print("WARN: resizing labels yielded fewer classes")
#
# if not np.all(np.unique(seglbl[seglbl != self.ignore_index]) < self.n_classes):
# print("after det", classes, np.unique(seglbl))
# raise ValueError("Segmentation map contained invalid class values")
# print("after resize: ")
# print(image.shape)
# print(seglbl.size)
# mkdir("/home/user/Disk1.8T/draw_result/augmentations/Resize")
# mkdir("/home/user/Disk1.8T/draw_result/augmentations/Resize_seglbl")
#
# im_name = random.randint(1, 1000000)
#
# cv2.imwrite(
# "/home/user/Disk1.8T/draw_result/augmentations/Resize/" + str(im_name) + ".png",
# image)
# cv2.imwrite(
# "/home/user/Disk1.8T/draw_result/augmentations/Resize_seglbl/" + str(im_name) + ".png",
# seglbl)
return image, boxes, labels, seglbl
# 随机饱和度
class RandomSaturation(object):
def __init__(self, lower=0.5, upper=1.5):
self.lower = lower
self.upper = upper
assert self.upper >= self.lower, "contrast upper must be >= lower."
assert self.lower >= 0, "contrast lower must be non-negative."
def __call__(self, image, boxes=None, labels=None, seglbl=None):
if random.randint(2):
image[:, :, 1] *= random.uniform(self.lower, self.upper)
return image, boxes, labels, seglbl
# 随机色调
class RandomHue(object):
def __init__(self, delta=18.0):
assert delta >= 0.0 and delta <= 360.0
self.delta = delta
def __call__(self, image, boxes=None, labels=None, seglbl=None):
if random.randint(2):
image[:, :, 0] += random.uniform(-self.delta, self.delta)
image[:, :, 0][image[:, :, 0] > 360.0] -= 360.0
image[:, :, 0][image[:, :, 0] < 0.0] += 360.0
return image, boxes, labels, seglbl
# 随机光噪声
class RandomLightingNoise(object):
def __init__(self):
self.perms = ((0, 1, 2), (0, 2, 1),
(1, 0, 2), (1, 2, 0),
(2, 0, 1), (2, 1, 0))
def __call__(self, image, boxes=None, labels=None, seglbl=None):
if random.randint(2):
swap = self.perms[random.randint(len(self.perms))]
shuffle = SwapChannels(swap) # shuffle channels
image = shuffle(image)
return image, boxes, labels, seglbl
# 颜色空间转换
class ConvertColor(object):
def __init__(self, current='BGR', transform='HSV'):
self.transform = transform
self.current = current
def __call__(self, image, boxes=None, labels=None, seglbl=None):
if self.current == 'BGR' and self.transform == 'HSV':
image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
elif self.current == 'HSV' and self.transform == 'BGR':
image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR)
else:
raise NotImplementedError
return image, boxes, labels, seglbl
# 随机对比度
class RandomContrast(object):
def __init__(self, lower=0.5, upper=1.5):
self.lower = lower
self.upper = upper
assert self.upper >= self.lower, "contrast upper must be >= lower."
assert self.lower >= 0, "contrast lower must be non-negative."
# expects float image
def __call__(self, image, boxes=None, labels=None, seglbl=None):
if random.randint(2):
alpha = random.uniform(self.lower, self.upper)
image *= alpha
return image, boxes, labels, seglbl
# 随机亮度
class RandomBrightness(object):
def __init__(self, delta=32):
assert delta >= 0.0
assert delta <= 255.0
self.delta = delta
def __call__(self, image, boxes=None, labels=None, seglbl=None):
if random.randint(2):
delta = random.uniform(-self.delta, self.delta)
image += delta
return image, boxes, labels, seglbl
class ToCV2Image(object):
def __call__(self, tensor, boxes=None, labels=None, seglbl=None):
return tensor.cpu().numpy().astype(np.float32).transpose((1, 2, 0)), boxes, labels, seglbl
class ToTensor(object):
def __call__(self, cvimage, boxes=None, labels=None, seglbl=None):
return torch.from_numpy(cvimage.astype(np.float32)).permute(2, 0, 1), boxes, labels, seglbl
# 随机裁剪
class RandomSampleCrop(object):
"""Crop
Arguments:
img (Image): the image being input during training
boxes (Tensor): the original bounding boxes in pt form
labels (Tensor): the class labels for each bbox
*seglbl (?? Image: P): 也将对应的语义分割标注对应过来,这里可能不好调,做好心理准备
seglbl需要保证格式不变,是P模式,之后转换成tensor
mode (float tuple): the min and max jaccard overlaps
Return:
(img, boxes, classes)
img (Image): the cropped image
boxes (Tensor): the adjusted bounding boxes in pt form
labels (Tensor): the class labels for each bbox
seglbl (Tensor): the seglbl for image
"""
def __init__(self):
self.sample_options = (
# using entire original input image
None,
# sample a patch s.t. MIN jaccard w/ obj in .1,.3,.4,.7,.9
(0.1, None),
(0.3, None),
(0.7, None),
(0.9, None),
# randomly sample a patch
(None, None),
)
def __call__(self, image, boxes=None, labels=None, seglbl=None):
height, width, _ = image.shape
while True:
# randomly choose a mode
mode = random.choice(self.sample_options)
if mode is None:
return image, boxes, labels, seglbl
min_iou, max_iou = mode
if min_iou is None:
min_iou = float('-inf')
if max_iou is None:
max_iou = float('inf')
# max trails (50) ?? 如何理解?
for _ in range(50):
current_image = image
current_seglbl = seglbl
# print("before: ")
# print(current_seglbl.shape)
# print(current_image.shape)
w = random.uniform(0.3 * width, width)
h = random.uniform(0.3 * height, height)
# aspect ratio constraint b/t .5 & 2 这里对aspect ratio进行了限制,具体意义呢?
# 是对随机裁剪的图像进行限制
if h / w < 0.5 or h / w > 2:
continue
left = random.uniform(width - w)
top = random.uniform(height - h)
# convert to integer rect x1,y1,x2,y2
rect = np.array([int(left), int(top), int(left+w), int(top+h)])
# calculate IoU (jaccard overlap) b/t the cropped and gt boxes
overlap = jaccard_numpy(boxes, rect)
# is min and max overlap constraint satisfied? if not try again
if overlap.min() < min_iou and max_iou < overlap.max():
continue
# cut the crop from the image
current_image = current_image[rect[1]:rect[3], rect[0]:rect[2],:]
# print("current_image type: ")
# print(type(current_image))
# 使用seglbl,要确保seglbl的格式不会改变
# print(seglbl.shape)
current_seglbl = current_seglbl[rect[1]:rect[3], rect[0]:rect[2]]
# print("current_seglbl shape: after")
# print(current_seglbl.shape)
# print(current_image.shape)
# keep overlap with gt box IF center in sampled patch
centers = (boxes[:, :2] + boxes[:, 2:]) / 2.0
# mask in all gt boxes that above and to the left of centers
m1 = (rect[0] < centers[:, 0]) * (rect[1] < centers[:, 1])
# mask in all gt boxes that under and to the right of centers
m2 = (rect[2] > centers[:, 0]) * (rect[3] > centers[:, 1])
# mask in that both m1 and m2 are true
mask = m1 * m2
# have any valid boxes? try again if not
if not mask.any():
continue
# take only matching gt boxes
current_boxes = boxes[mask, :].copy()
# take only matching gt labels
current_labels = labels[mask]
# should we use the box left and top corner or the crop's
current_boxes[:, :2] = np.maximum(current_boxes[:, :2],
rect[:2])
# adjust to crop (by substracting crop's left,top)
current_boxes[:, :2] -= rect[:2]
current_boxes[:, 2:] = np.minimum(current_boxes[:, 2:],
rect[2:])
# adjust to crop (by substracting crop's left,top)
current_boxes[:, 2:] -= rect[:2]
# print(type(current_seglbl)) # "numpy"
# cv2.imwrite(
# "/home/user/Disk1.8T/draw_result/augmentations/RandomSampleCrop/" + str(_) + ".png",
# current_image)
# cv2.imwrite(
# "/home/user/Disk1.8T/draw_result/augmentations/RandomSampleCrop_seglbl/" + str(_) + ".png",
# current_seglbl)
return current_image, current_boxes, current_labels, current_seglbl
class Expand(object):
def __init__(self, mean):
self.mean = mean
def __call__(self, image, boxes, labels, seglbl):
if random.randint(2):
return image, boxes, labels, seglbl
height, width, depth = image.shape
ratio = random.uniform(1, 4)
left = random.uniform(0, width*ratio - width)
top = random.uniform(0, height*ratio - height)
expand_image = np.zeros(
(int(height*ratio), int(width*ratio), depth),
dtype=image.dtype)
expand_image[:, :, :] = self.mean
expand_image[int(top):int(top + height),
int(left):int(left + width)] = image
image = expand_image
# 对seglbl进行对应处理
s_h, s_w = seglbl.shape
expand_seglbl = np.zeros((int(s_h*ratio), int(s_w*ratio)), dtype=seglbl.dtype)
#expand_seglbl[:, :] = [0,0] # 扩展的其他位置都是黑色,
expand_seglbl[int(top):int(top + height),
int(left):int(left + width)] = seglbl
seglbl = expand_seglbl
boxes = boxes.copy()
boxes[:, :2] += (int(left), int(top))
boxes[:, 2:] += (int(left), int(top))
# mkdir("/home/user/Disk1.8T/draw_result/augmentations/Expand")
# mkdir("/home/user/Disk1.8T/draw_result/augmentations/Expand_seglbl")
#
# im_name = random.randint(1, 1000000)
#
# cv2.imwrite(
# "/home/user/Disk1.8T/draw_result/augmentations/Expand/" + str(im_name) + ".png",
# image)
# cv2.imwrite(
# "/home/user/Disk1.8T/draw_result/augmentations/Expand_seglbl/" + str(im_name) + ".png",
# seglbl)
return image, boxes, labels, seglbl
class RandomMirror(object):
def __call__(self, image, boxes, labels, seglbl):
_, width, _ = image.shape
if random.randint(2):
image = image[:, ::-1]
boxes = boxes.copy()
boxes[:, 0::2] = width - boxes[:, 2::-2]
seglbl = seglbl[:, ::-1] # 这就是镜像操作吗?确认一下
# mkdir("/home/user/Disk1.8T/draw_result/augmentations/RandomMirror")
# mkdir("/home/user/Disk1.8T/draw_result/augmentations/RandomMirror_seglbl")
#
# im_name = random.randint(1, 1000000)
#
# cv2.imwrite(
# "/home/user/Disk1.8T/draw_result/augmentations/RandomMirror/" + str(im_name) + ".png",
# image)
# cv2.imwrite(
# "/home/user/Disk1.8T/draw_result/augmentations/RandomMirror_seglbl/" + str(im_name) + ".png",
# seglbl)
return image, boxes, labels, seglbl
class SwapChannels(object):
"""Transforms a tensorized image by swapping the channels in the order
specified in the swap tuple.
Args:
swaps (int triple): final order of channels
eg: (2, 1, 0)
"""
def __init__(self, swaps):
self.swaps = swaps
def __call__(self, image):
"""
Args:
image (Tensor): image tensor to be transformed
Return:
a tensor with channels swapped according to swap
"""
# if torch.is_tensor(image):
# image = image.data.cpu().numpy()
# else:
# image = np.array(image)
image = image[:, :, self.swaps]
return image
# 图像矩阵扭曲
class PhotometricDistort(object):
def __init__(self):
self.pd = [
RandomContrast(),
ConvertColor(transform='HSV'),
RandomSaturation(),
RandomHue(),
ConvertColor(current='HSV', transform='BGR'),
RandomContrast()
]
self.rand_brightness = RandomBrightness()
self.rand_light_noise = RandomLightingNoise()
def __call__(self, image, boxes, labels, seglbl):
im = image.copy()
im, boxes, labels, seglbl = self.rand_brightness(im, boxes, labels, seglbl)
if random.randint(2):
distort = Compose(self.pd[:-1])
else:
distort = Compose(self.pd[1:])
im, boxes, labels, seglbl = distort(im, boxes, labels, seglbl)
# mkdir("/home/user/Disk1.8T/draw_result/augmentations/PhotometricDistort")
# mkdir("/home/user/Disk1.8T/draw_result/augmentations/PhotometricDistort_seglbl")
#
# im_name = random.randint(1, 1000000)
#
# cv2.imwrite(
# "/home/user/Disk1.8T/draw_result/augmentations/PhotometricDistort/" + str(im_name) + ".png",
# image)
# cv2.imwrite(
# "/home/user/Disk1.8T/draw_result/augmentations/PhotometricDistort_seglbl/" + str(im_name) + ".png",
# seglbl)
return self.rand_light_noise(im, boxes, labels, seglbl)
class SSDAugmentation(object):
def __init__(self, size=[960,540], mean=(118.3, 118.1, 112.5)):
self.mean = mean
self.size = size
self.augment = Compose([
ConvertFromInts(),
ToAbsoluteCoords(),
PhotometricDistort(),
Expand(self.mean),
RandomSampleCrop(),
RandomMirror(),
ToPercentCoords(),
Resize(self.size),
SubtractMeans(self.mean)
])
def __call__(self, img, boxes, labels, seglbl):
return self.augment(img, boxes, labels, seglbl)
| [
"unicoe@163.com"
] | unicoe@163.com |
d4e45f4c40639ad0d1e9321c4c0fd0b810fff068 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2647/60769/262538.py | 2466f6b8a6a58970730c5979bc7b6fb85ac62845 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 189 | py | num = int(input())
for j in range(num):
nn = int(input())
bi = str(bin(nn))[2:]
sum = 0
for i in range(len(bi)):
if bi[i] == "1":
sum += 1
print(sum) | [
"1069583789@qq.com"
] | 1069583789@qq.com |
6e4138165c539d1808e0fac90a8ffe05f6fbe529 | ea2e8ea702601f41c261d14e796f3951f8b2dbb5 | /skfcms_project/urls.py | 857c6b16ed79c315cbf3d1de0789cf38606a681c | [] | no_license | kamruljpi/ecommerce | cb7a064d3cf2d902fd49e54ed1d1842aeb4cfa48 | e7c3062429e5224fcfb6c3c57a59763b7b9901c3 | refs/heads/master | 2021-07-11T02:02:17.586644 | 2017-10-05T19:43:51 | 2017-10-05T19:43:51 | 105,932,841 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 838 | py | """skfcms_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from ecom_app import urls as ecom_urls
urlpatterns = [
url(r'^', include(ecom_urls, namespace='ecom_app')),
]
| [
"="
] | = |
97406c712d00d3c0c18c7e45aa341901ed3af6c5 | 9f0e740c6486bcb12f038c443b039c886124e55c | /python-study/tools/verifycode/verify_code_2.py | b3d0462c8d289321f79dfb20a916f28991924189 | [] | no_license | zfanai/python-study | 373ff09bd1e6be9e098bde924c98f5277ad58a54 | de11a6c8018730bb27e26808f5cbc0c615b4468f | refs/heads/master | 2021-01-18T17:59:16.817832 | 2017-11-06T09:33:21 | 2017-11-06T09:33:21 | 86,831,175 | 1 | 0 | null | null | null | null | GB18030 | Python | false | false | 4,269 | py | #encoding:gbk
import random
from PIL import Image, ImageDraw, ImageFont, ImageFilter
_letter_cases = "abcdefghjkmnpqrstuvwxy" # 小写字母,去除可能干扰的i,l,o,z
_upper_cases = _letter_cases.upper() # 大写字母
_numbers = ''.join(map(str, range(3, 10))) # 数字
init_chars = ''.join((_letter_cases, _upper_cases, _numbers))
def create_validate_code(size=(120, 30),
chars=init_chars,
img_type="GIF",
mode="RGB",
bg_color=(255, 255, 255),
fg_color=(0, 0, 255),
font_size=18,
font_type="ae_AlArabiya.ttf",
length=4,
draw_lines=True,
n_line=(1, 2),
draw_points=True,
point_chance = 2):
'''
@todo: 生成验证码图片
@param size: 图片的大小,格式(宽,高),默认为(120, 30)
@param chars: 允许的字符集合,格式字符串
@param img_type: 图片保存的格式,默认为GIF,可选的为GIF,JPEG,TIFF,PNG
@param mode: 图片模式,默认为RGB
@param bg_color: 背景颜色,默认为白色
@param fg_color: 前景色,验证码字符颜色,默认为蓝色#0000FF
@param font_size: 验证码字体大小
@param font_type: 验证码字体,默认为 ae_AlArabiya.ttf
@param length: 验证码字符个数
@param draw_lines: 是否划干扰线
@param n_lines: 干扰线的条数范围,格式元组,默认为(1, 2),只有draw_lines为True时有效
@param draw_points: 是否画干扰点
@param point_chance: 干扰点出现的概率,大小范围[0, 100]
@return: [0]: PIL Image实例
@return: [1]: 验证码图片中的字符串
'''
width, height = size # 宽, 高
img = Image.new(mode, size, bg_color) # 创建图形
draw = ImageDraw.Draw(img) # 创建画笔
def get_chars():
'''生成给定长度的字符串,返回列表格式'''
return random.sample(chars, length)
def create_lines():
'''绘制干扰线'''
line_num = random.randint(*n_line) # 干扰线条数
for i in range(line_num):
# 起始点
begin = (random.randint(0, size[0]), random.randint(0, size[1]))
#结束点
end = (random.randint(0, size[0]), random.randint(0, size[1]))
draw.line([begin, end], fill=(0, 0, 0))
def create_points():
'''绘制干扰点'''
chance = min(100, max(0, int(point_chance))) # 大小限制在[0, 100]
for w in xrange(width):
for h in xrange(height):
tmp = random.randint(0, 100)
if tmp > 100 - chance:
draw.point((w, h), fill=(0, 0, 0))
def create_strs():
'''绘制验证码字符'''
c_chars = get_chars()
strs = ' %s ' % ' '.join(c_chars) # 每个字符前后以空格隔开
font = ImageFont.truetype(font_type, font_size)
font_width, font_height = font.getsize(strs)
draw.text(((width - font_width) / 3, (height - font_height) / 3),
strs, font=font, fill=fg_color)
return ''.join(c_chars)
if draw_lines:
create_lines()
if draw_points:
create_points()
strs = create_strs()
# 图形扭曲参数
params = [1 - float(random.randint(1, 2)) / 100,
0,
0,
0,
1 - float(random.randint(1, 10)) / 100,
float(random.randint(1, 2)) / 500,
0.001,
float(random.randint(1, 2)) / 500
]
img = img.transform(size, Image.PERSPECTIVE, params) # 创建扭曲
img = img.filter(ImageFilter.EDGE_ENHANCE_MORE) # 滤镜,边界加强(阈值更大)
return img, strs
if __name__ == "__main__":
code_img,code = create_validate_code(font_type="arial.ttf")
#code_img.save("validate.gif", "GIF")
code_img.save("validate.jpg", "JPEG")
print code | [
"zf_sch@126.com"
] | zf_sch@126.com |
55ca65b0efa49bd99c0050951177f9a7bacb5efa | 81406671f82b4fb13f4936eebe20d4f4ff954177 | /ltr/ltr/settings.py | 1e9f5ee259a312fd14f78e4d748a09134de75f77 | [] | no_license | thatandromeda/ltr | a6e98832015daee26c2fb208c9d858a72ffbf8bd | 7d7790444acccf476c90f415bb88d96216948a15 | refs/heads/master | 2021-01-21T12:52:48.521458 | 2014-12-09T20:26:32 | 2014-12-09T20:26:32 | 19,824,699 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,249 | py | """
Django settings for ltr project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'lr72a%5hsvgves1+959cn*q=t$iuj#d!#002_2w!&+9@=ai)*%'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'taggit',
'south',
'ltr',
'crispy_forms',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'ltr.urls'
WSGI_APPLICATION = 'ltr.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
# Templates
def rel(*x):
return os.path.abspath(os.path.join(BASE_DIR, *x))
TEMPLATE_DIRS = (
rel('templates'),
)
# required for taggit
SOUTH_MIGRATION_MODULES = {
'taggit': 'taggit.south_migrations',
} | [
"andromeda.yelton@gmail.com"
] | andromeda.yelton@gmail.com |
ef9505fa441399af3d3e976ecb1cfb158d2a657b | 62ffd1d0ca1a325988e0b89341e8ae5236ddd127 | /Android_Tensorflow/testTF/TF_sotmax.py | 99a21e0ba31d9a88697c43389764795aacb8f427 | [] | no_license | curryli/APP_Action | 3edc6c88590869fabdc41729eb228b29adc5024f | 06a9ecc7961caf6760c588bd7e060041cdc8b2a7 | refs/heads/master | 2021-09-09T19:22:58.794998 | 2018-03-19T07:01:28 | 2018-03-19T07:01:28 | 114,858,653 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,209 | py | # -*- coding: utf-8 -*-
import tensorflow as tf
from sklearn.datasets import load_iris
import numpy as np
import pandas as pd
iris=load_iris()
iris_data=iris.data
iris_target=iris.target
iris_target1=pd.get_dummies(iris_target).values
print(iris_data.shape)
X=iris_data
print(X.shape)
x=tf.placeholder(dtype=tf.float32,shape=[None,4],name="input")
y=tf.placeholder(dtype=tf.float32,shape=[None,3],name="output") #三分类
w=tf.get_variable("weight",shape=[4,3],dtype=tf.float32,initializer=tf.truncated_normal_initializer(stddev=0.1))
bais=tf.get_variable("bais",shape=[3],dtype=tf.float32,initializer=tf.constant_initializer(0))
y_out=tf.nn.bias_add(tf.matmul(x,w),bais)
loss=tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y,logits=y_out))
accuracy=tf.reduce_mean(tf.cast(tf.equal(tf.arg_max(y,1),tf.arg_max(y_out,1)),tf.float32))
train_step=tf.train.AdamOptimizer().minimize(loss)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
for i in range(3001):
sess.run(train_step,feed_dict={x:X,y:iris_target1})
if i%500==0:
accuracy_print=sess.run(accuracy,feed_dict={x:X,y:iris_target1})
print(accuracy_print)
| [
"xurui.lee@msn.com"
] | xurui.lee@msn.com |
b3e012396df4d1b6ef5cc53c65309913c6a1fb2d | d37a19ab3bcaba6e808a18df411c653c644d27db | /Year2/CA268/Week03/suspicious.py | 81f85e716f34f9fb63756c14c9cfe66ae0744652 | [] | no_license | Andrew-Finn/DCU | 9e7009dac9a543aaade17e9e94116259dcc1de20 | 013789e8150d80d3b3ce2c0c7ba968b2c69a7ce0 | refs/heads/master | 2023-02-21T05:13:42.731828 | 2022-02-14T12:39:20 | 2022-02-14T12:39:20 | 157,438,470 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 276 | py | import sys
with open(sys.argv[1], "right") as f:
students = f.readlines()
with open(sys.argv[2], "right") as f:
bad = f.readlines()
out = sorted(list(set(students).intersection(set(bad))))
for i in range(len(out)):
print("{}. {}".format(i + 1, out[i]), end="")
| [
"git@afinn.me"
] | git@afinn.me |
d39b2b73aac648eca88713fd542c196b8c01e4d5 | e578b27fe0f8a47931e459c162cd699c6338a862 | /payments/management/commands/send_payment_reminders.py | 8fc399ff0e2e74d9f974963bd7201b9ba5ef38ad | [
"MIT"
] | permissive | City-of-Helsinki/berth-reservations | 0ded0448cc3a9391b6c1bde61767633b34124e80 | d1983366452eff9714d425bcef2d7d78483738f8 | refs/heads/master | 2023-04-06T06:49:31.433852 | 2023-03-09T12:38:46 | 2023-03-09T12:41:29 | 153,620,377 | 5 | 2 | MIT | 2023-03-21T22:56:51 | 2018-10-18T12:30:27 | Python | UTF-8 | Python | false | false | 398 | py | from payments.models import Order
from utils.base_expiration_command import FeatureFlagCommand
class Command(FeatureFlagCommand):
help = "Send payment reminder notifications"
feature_flag_name = "PAYMENTS_REMINDER_NOTIFICATION_CRONJOB_ENABLED"
def run_operation(self, dry_run, **options) -> int:
return Order.objects.send_payment_reminders_for_unpaid_orders(dry_run=dry_run)
| [
"juha.louhiranta@anders.fi"
] | juha.louhiranta@anders.fi |
572f8a2ea05c299ebe1dd4161f260cb554d88273 | db80edb9be895c4ebcb9acac96eff92b7fda2bd3 | /src/utils/dsp_tools.py | 5fa52563df09a03e05fbee4e59d8a7b3e4622215 | [] | no_license | YhHoo/AE-signal-model | 9950182425377364d83a8a86b72ed181b789a599 | 8ba384397a88ea8316deee3173503fccb9e485af | refs/heads/master | 2021-06-26T03:43:21.482586 | 2019-05-16T10:53:25 | 2019-05-16T10:53:25 | 131,477,937 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 16,907 | py | # ------------------------------------------------------
# Process the Raw AE signals for training-ready
# The Raw signal is sampled at 5MHz, So time btw points = 2e-7 s
# ------------------------------------------------------
import pywt
import numpy as np
import matplotlib.pyplot as plt
from scipy.fftpack import fft
from scipy.signal import spectrogram, correlate
from statsmodels.robust import mad
from scipy.signal import filtfilt, butter
from sklearn.preprocessing import MinMaxScaler
# FAST FOURIER TRANSFORM (FFT)
def fft_scipy(sampled_data=None, fs=1, visualize=True, vis_max_freq_range=None):
    '''
    Single-sided FFT of a 1-d signal, with optional magnitude/phase plots.
    :param sampled_data: A one dimensional data (Size = N), can be list or series
    :param fs: Sampling frequency
    :param visualize: Plot or not (Boolean)
    :param vis_max_freq_range: the maximum freq to include in visualization
    :return: (magnitude spectrum, phase spectrum, frequency axis), each of Size = N // 2
    '''
    # visualize freq: default the display range to the Nyquist frequency
    if vis_max_freq_range is None:
        vis_max_freq_range = fs/2
    # Sample points and sampling frequency
    # N = sampled_data.size  # deprecated on 6/9/18
    N = len(sampled_data)
    # fft
    # print('Scipy.FFT on {} points...'.format(N), end='')
    # take only half of the FFT output because it is a reflection
    # take abs because the FFT output is complex
    # divide by N to reduce the amplitude to correct one
    # times 2 to restore the discarded reflection amplitude
    y_fft = fft(sampled_data)
    y_fft_mag = (2.0/N) * np.abs(y_fft[0: N//2])
    y_fft_phase = np.angle(y_fft[0: N//2])
    # x-axis - only half of N (0 Hz .. Nyquist)
    f_axis = np.linspace(0.0, fs/2, N//2)
    if visualize:
        # use sci. notation at the x-axis value
        plt.subplots_adjust(hspace=0.5)
        plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
        plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
        # plot only 0Hz to specified freq
        # NOTE(review): xlim/ticklabel_format are applied before plt.subplot()
        # creates the subplot axes, so they may not affect the final plots —
        # verify against the intended figure layout.
        plt.xlim((0, vis_max_freq_range))
        # mag plot
        plt.subplot(211)
        plt.plot(f_axis, y_fft_mag)
        plt.xlabel('Frequency [Hz]')
        plt.ylabel('Magnitude')
        plt.title('Fast Fourier Transform')
        # phase plot
        plt.subplot(212)
        plt.plot(f_axis, y_fft_phase)
        plt.xlabel('Frequency [Hz]')
        plt.ylabel('Phase')
        plt.show()
    return y_fft_mag, y_fft_phase, f_axis
# SPECTROGRAM
def spectrogram_scipy(sampled_data=None, fs=1, nperseg=1, noverlap=1, nfft=None, mode='psd',
                      return_plot=False, vis_max_freq_range=None, verbose=False,
                      save=False, plot_title='Default'):
    '''
    Short-time spectrogram of a 1-d signal via scipy.signal.spectrogram.
    :param sampled_data: A one dimensional data (Size = N), can be list or series
    :param fs: Sampling frequency
    :param nperseg: if higher, f-res higher, no of freq bin = nperseg/2 + 1 !!
    :param noverlap: if higher, t-res higher
    :param nfft: if set to 500, and if nperseg is 400, 100 zeros will be added to the 400 points signal to make it 500.
    this is to simply increase the segmented length so that more freq res is obtained.
    :param mode: 'psd', 'magnitude', 'angle'(deg), 'phase'(rad), 'complex'
    :param return_plot: return figure object of the spectrogram plot, if False, the returned obj
    wil become none
    :param verbose: Print out the transformed data summary
    :param save: save the spectrogram as .jpeg (NOTE: currently unused — the
    old save path is in the commented-out block below)
    :param plot_title: title of the spectrogram to save
    :param vis_max_freq_range: the maximum freq to include in visualization
    :return: time axis, frequency band and the 2D matrix(shape[0]=freq, shape[1]=time step)
    '''
    # There is a trade-off btw resolution of frequency and time due to uncertainty principle
    # Spectrogram split input signal into segments before FFT and PSD on each seg.
    # Adjust nperseg is adjusting segment length. Higher nperseg giv more res in Freq but
    # lesser res in time domain.
    # ensure it is np array
    if isinstance(sampled_data, list):
        sampled_data = np.array(sampled_data)
    # check the visualize freq range: default to Nyquist
    if vis_max_freq_range is None:
        vis_max_freq_range = fs / 2
    # begin
    f, t, Sxx = spectrogram(sampled_data,
                            fs=fs,
                            scaling='spectrum',
                            nperseg=nperseg,
                            noverlap=noverlap,
                            nfft=nfft,
                            mode=mode)
    # frequency resolution per band and time resolution per segment
    f_res = fs / (2 * (f.size - 1))
    t_res = (sampled_data.shape[0] / fs) / t.size
    # result summary
    if verbose:
        print('\n----------SPECTROGRAM OUTPUT---------')
        print('Time Segment....{}\nFirst 5: {}\nLast 5: {}\n'.format(t.size, t[:5], t[-5:]))
        print('Frequency Segment....{}\nFirst 5: {}\nLast 5: {}\n'.format(f.size, f[:5], f[-5:]))
        print('Spectrogram Dim: {}\nF-Resolution: {}Hz/Band\nT-Resolution: {}'.format(Sxx.shape, f_res, t_res))
    # plotting spectrogram (fig stays None when return_plot is False)
    if return_plot:
        fig = plt.figure(figsize=(8, 6))  # (x len, y len)
        fig.suptitle(plot_title)
        # main axes on the left, dedicated colorbar axes on the right
        ax = fig.add_axes([0.1, 0.1, 0.6, 0.8])
        colorbar_ax = fig.add_axes([0.7, 0.1, 0.05, 0.8])
        i = ax.pcolormesh(t, f, Sxx)
        fig.colorbar(i, cax=colorbar_ax)
        ax.grid()
        ax.set_xlabel('Time [Sec]')
        ax.set_ylabel('Frequency [Hz]')
        ax.set_ylim(bottom=0, top=vis_max_freq_range, auto=True)
        ax.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
    else:
        fig = None
    # [THIS METHOD OF PLOT IS OBSOLETE]
    # if save or return_plot:
    #     plt.pcolormesh(t, f, Sxx)
    #     plt.ylabel('Frequency [Hz]')
    #     # display only 0Hz to 300kHz
    #     plt.ylim((0, vis_max_freq_range))
    #     plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
    #     plt.ticklabel_format(style='sci', axis='x', scilimits=(0, 0))
    #     plt.title(save_title)
    #     plt.grid()
    #     plt.xlabel('Time [Sec]')
    #     plt.colorbar()
    #     if save:
    #         plt.savefig('result\{}.png'.format(save_title))
    #
    #     if return_plot:
    #         plt.show()
    #     plt.close()
    return t, f, Sxx, fig
def butter_bandpass_filtfilt(sampled_data, fs, f_hicut, f_locut, order=5):
    '''
    Zero-phase Butterworth band-pass filter.
    :param sampled_data: input signal
    :param fs: sampling frequency of the input
    :param f_hicut: upper edge of the passband [Hz]
    :param f_locut: lower edge of the passband [Hz]
    :param order: filter order; higher order gives a sharper (higher-Q) filter
    :return: np array, filtered forward and backward so no phase shift is introduced
    '''
    # Normalize the passband edges to the Nyquist frequency, as butter() expects.
    nyquist = 0.5 * fs
    normalized_band = [f_locut / nyquist, f_hicut / nyquist]
    numerator, denominator = butter(order, normalized_band, btype='band')  # ignore warning
    # filtfilt applies the filter twice (forward + reverse) => zero phase shift
    return filtfilt(numerator, denominator, sampled_data)
def one_dim_xcor_2d_input(input_mat, pair_list, verbose=False):
    '''
    Cross-correlate pairs of multi-band phase maps band by band, then min-max
    normalize each resulting xcor map to [0, 1] as a whole.

    scipy.signal.correlate slides the first signal head-first into the second
    signal's tail, so if input_mat[pair[0]] leads, the correlation maximum
    appears to the left of the centre point.
    :param input_mat: 3d array; shape[0] -> phase maps (diff sensors),
                      shape[1] -> freq band, shape[2] -> time steps
    :param pair_list: list of 2-tuples, e.g. [(0, 1), (1, 2)]; each tuple names
                      the two phase maps to cross-correlate
    :param verbose: print the output xcor map dimension
    :return: (maps, lag axis) whr maps shape[0] -> no. of xcor maps,
             shape[1] -> freq band, shape[2] -> xcor steps
    '''
    # sanity check: every phase map must expose a band axis (axis 1)
    try:
        # simply accessing
        input_mat.shape[1]
    except IndexError:
        print('YH_WARNING: The axis[1] of the input_mat are not uniform')
        raise
    n_bands = input_mat.shape[1]
    xcor_bank = []
    for first, second in pair_list:
        # full cross-correlation for every feature (freq / wavelet width) band
        band_maps = [correlate(input_mat[first, band], input_mat[second, band],
                               'full', method='fft')
                     for band in range(n_bands)]
        # axis[0] is freq band, axis[1] is x-cor unit shift
        band_maps = np.array(band_maps)
        # rescale the whole map linearly into [0, 1] using its global min/max
        scaler = MinMaxScaler(feature_range=(0, 1))
        flattened = band_maps.ravel().reshape((-1, 1))
        band_maps = scaler.fit_transform(flattened).reshape(band_maps.shape)
        # store the xcor map for this pair of phase maps
        xcor_bank.append(band_maps)
    xcor_bank = np.array(xcor_bank)
    # lag axis centred on zero shift
    n_steps = xcor_bank.shape[2]
    xcor_axis = np.arange(1, n_steps + 1, 1) - n_steps // 2 - 1
    if verbose:
        print('\n---------One-Dimensional X-correlation---------')
        print('Xcor Map Dim (No. of xcor map, freq band, xcor steps): ', xcor_bank.shape)
    return xcor_bank, xcor_axis
def one_dim_xcor_1d_input(input_mat, pair_list, verbose=False):
    '''
    Cross-correlate pairs of single-band (1-d) signals.

    Same as one_dim_xcor_2d_input(), except each sensor contributes one 1-d
    signal and no normalization is applied to the xcor result.
    :param input_mat: 2d array; shape[0] -> signals (diff sensors),
                      shape[1] -> time steps
    :param pair_list: list of 2-tuples, e.g. [(0, 1), (1, 2)], naming the rows
                      of input_mat to cross-correlate
    :param verbose: print the output xcor map dimension
    :return: (xcor_bank, xcor_axis) whr xcor_bank shape[0] -> no. of xcor maps,
             shape[1] -> xcor steps; xcor_axis is the lag axis centred on zero
    '''
    xcor_bank = []
    for pair in pair_list:
        x_cor = correlate(input_mat[pair[0]], input_mat[pair[1]], 'full', method='fft')
        # BUGFIX: append the whole correlation vector. The previous code
        # appended only x_cor[0] (a single scalar), which collapsed xcor_bank
        # to 1-d and made xcor_bank.shape[1] below raise IndexError.
        xcor_bank.append(x_cor)
    xcor_bank = np.array(xcor_bank)
    # lag axis: [-(L//2), ..., 0, ..., L - L//2 - 1] for a full xcor of length L
    xcor_len = xcor_bank.shape[1]
    xcor_axis = np.arange(1, xcor_len + 1, 1) - xcor_len // 2 - 1
    if verbose:
        print('\n---------One-Dimensional X-correlation---------')
        print('Xcor Map Dim (No. of xcor map, freq band, xcor steps): ', xcor_bank.shape)
    return xcor_bank, xcor_axis
def dwt_smoothing(x, wavelet="db4", level=1):
    '''
    Denoise a 1-d signal by soft-thresholding its wavelet detail coefficients.
    :param x: 1d array / list of points (1d signal)
    :param wavelet: pywt wavelet name used for the decomposition
    :param level: which approximation level to use when estimating the noise
                  sigma for the universal threshold
    :return: 1d array, the reconstructed (smoothed) signal
    '''
    # decompose the signal into wavelet coefficients
    coeffs = pywt.wavedec(x, wavelet, mode="per")
    # noise estimate: Median Absolute Deviation of the chosen coefficient level
    noise_sigma = mad(coeffs[-level])
    # universal threshold: sigma * sqrt(2 * ln(N))
    universal_threshold = noise_sigma * np.sqrt(2 * np.log(len(x)))
    # soft-threshold every detail component (all levels after the approximation)
    coeffs[1:] = [pywt.threshold(c, value=universal_threshold, mode="soft")
                  for c in coeffs[1:]]
    # rebuild the signal from the thresholded coefficients
    return pywt.waverec(coeffs, wavelet, mode="per")
def detect_ae_event_by_sandwich_sensor(x1, x2, threshold1, threshold2):
    '''
    Match peak indexes from 2 sensors placed at the SAME distance from the AE
    source (e.g. outputs of peakutils.indexes()).

    Phase 1: for every item of x1, take the first item of x2 closer to it than
    threshold1 and keep the x2 value.
    Phase 2: from that result, whenever two consecutive kept values differ by
    less than threshold2, discard the earlier one.
    example --
    x1 = [1, 13, 18, 20, 34]
    x2 = [0, 15, 17, 50]
    thre1 = 2
    thre2 = 3
    first, it will produce [0, 15, 17] --> closely similiar values
    then, it will produce [0, 15] --> remove the index that are closely similar within itself
    :param x1: a 1d array of list
    :param x2: a 1d array of list
    :param threshold1: for x1 and x2 similar point selection
    :param threshold2: for too-close-point removal
    :return: a list
    '''
    matched = []
    # Phase 1: keep the first x2 peak within threshold1 of each x1 peak
    for candidate in x1:
        for reference in x2:
            if np.abs(reference - candidate) < threshold1:
                matched.append(reference)
                break
    # Phase 2: collect the earlier member of each too-close consecutive pair...
    too_close = [matched[k] for k in range(len(matched) - 1)
                 if np.abs(matched[k + 1] - matched[k]) < threshold2]
    # ...then drop them (list.remove deletes the first occurrence)
    for value in too_close:
        matched.remove(value)
    return matched
def detect_ae_event_by_v_sensor(x1, x2, x3, x4, n_ch_signal, threshold_list=None, threshold_x=None):
    '''
    This function is specific for processing 4 lists of peak indexes from peakutils.indexes()
    Algorithm:
    It is expecting [x1, x2, x3, x4] as [-3m, -2m, 2m, 4m], and the ae event
    is from 0m.
    First, it finds similiar peaks from -2m and 2m --> abs([-2m]-[2m]) < threshold[0]
    Then, it takes the midpoints of the similiar peak from -2m and 2m as mid.
    It then finds points from x1 that are > mid and has distance less than threshold[1]. if yes, set flag_0 = True
    Last, it finds points from x4 that are > mid and has distance less than threshold[2]. if yes, set flag_1 = True
    For mid with both flag_0 and flag_1 on, it is stored and returned.
    :param x1: a list of peaks indexs
    :param x2: a list of peaks indexs
    :param x3: a list of peaks indexs
    :param x4: a list of peaks indexs
    :param n_ch_signal: 2d array indexed as [channel, sample]; rows 0/1/2 are
                        used for amplitude comparison between the candidate
                        peaks -- presumably channels ordered as in [x1..x4];
                        TODO confirm the channel ordering with the caller
    :param threshold_list: a list of 3 integers (required despite the None
                           default -- passing None raises at first use)
    :param threshold_x: a single value
    :return: a list of indexes where AE event is detected
    '''
    ae_peak, ae_peak_confirmed = [], []
    ae_peak_lo, ae_peak_hi = [], []
    flag_0, flag_1 = False, False
    # Stage 1 ----------------------------------------------------------------------------------------------------------
    # finding [0m]-emitted peak btw x2 and x3 (the +/-2m sensors)
    # we allow a very small dist difference btw the 2 peaks
    for p2 in x2:
        for p3 in x3:
            abs_diff = np.abs(p3 - p2)
            if abs_diff < threshold_list[0]:
                # find the mid point
                ae_peak.append(np.mean((p2, p3)))
                ae_peak_lo.append(p2)
                ae_peak_hi.append(p3)
                break
    # Stage 2 ----------------------------------------------------------------------------------------------------------
    # for all mid detected
    for mid, mid_lo, mid_hi in zip(ae_peak, ae_peak_lo, ae_peak_hi):
        # check mid with x1 (only the first x1 peak past mid is considered)
        for p1 in x1:
            if p1 > mid:
                # satisfying distance and amplitude criteria
                if ((p1-mid) < threshold_list[1]) and (n_ch_signal[1, mid_lo] > n_ch_signal[0, p1]):
                    flag_0 = True
                # exit for loop once a match is found
                break
        # check mid with x4 (only the first x4 peak past mid is considered)
        for p4 in x4:
            if p4 > mid:
                # satisfying distance and amplitude criteria
                if (p4-mid) < threshold_list[2] and (n_ch_signal[2, mid_hi] > n_ch_signal[0, p4]):
                    flag_1 = True
                # exit for loop once a match is found
                break
        # verify the mid with flag 1 and 2
        if flag_0 and flag_1:
            ae_peak_confirmed.append(int(mid))
        # reset flag to 0 before processing the next mid
        flag_0, flag_1 = False, False
    # Stage 3 ----------------------------------------------------------------------------------------------------------
    # remove the peak are too close (differ within threshold_x).
    item_to_del = []
    # note down either one of the 2 items tat too close in a delete list
    for i in range(len(ae_peak_confirmed) - 1):
        if np.abs(ae_peak_confirmed[i + 1] - ae_peak_confirmed[i]) < threshold_x:
            item_to_del.append(ae_peak_confirmed[i])
    # remove items according to the delete list
    for d in item_to_del:
        ae_peak_confirmed.remove(d)
    return ae_peak_confirmed
| [
"hooyuheng@gmail.com"
] | hooyuheng@gmail.com |
dfb05342707d079df69d72120d8cdf801bbe6daf | ef6229d281edecbea3faad37830cb1d452d03e5b | /ucsmsdk/mometa/sysdebug/SysdebugEp.py | 3525c9442aa33eb9bcc4dbf25dc0ecf3287dd24d | [
"Apache-2.0"
] | permissive | anoop1984/python_sdk | 0809be78de32350acc40701d6207631322851010 | c4a226bad5e10ad233eda62bc8f6d66a5a82b651 | refs/heads/master | 2020-12-31T00:18:57.415950 | 2016-04-26T17:39:38 | 2016-04-26T17:39:38 | 57,148,449 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,036 | py | """This module contains the general information for SysdebugEp ManagedObject."""
import sys, os
from ...ucsmo import ManagedObject
from ...ucscoremeta import UcsVersion, MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class SysdebugEpConsts():
    # Auto-generated constants holder for SysdebugEp; this managed object
    # declares no enumerated property values, hence the empty body.
    pass
class SysdebugEp(ManagedObject):
    """This is SysdebugEp class."""

    consts = SysdebugEpConsts()
    naming_props = set([])

    # Auto-generated metadata describing this managed object: class names,
    # category, minimum version, access privileges, parent and child classes.
    mo_meta = MoMeta("SysdebugEp", "sysdebugEp", "sysdebug", VersionMeta.Version101e, "InputOutput", 0x1f, [], ["read-only"], [u'topSystem'], [u'sysdebugAutoCoreFileExportTarget', u'sysdebugLogControlEp', u'sysdebugLogExportPolicy'], ["Get"])

    # Per-property metadata: XML attribute name, type, version, access,
    # dirty-mask bit, length bounds and validation regex.
    prop_meta = {
        "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version101e, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
        "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
        "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, 0x8, 0, 256, None, [], []),
        "sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302a, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
        "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x10, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
    }

    # Maps the XML attribute names to the Python attribute names above.
    prop_map = {
        "childAction": "child_action", 
        "dn": "dn", 
        "rn": "rn", 
        "sacl": "sacl", 
        "status": "status", 
    }

    def __init__(self, parent_mo_or_dn, **kwargs):
        # No properties are dirty until a caller mutates them.
        self._dirty_mask = 0
        self.child_action = None
        self.sacl = None
        self.status = None

        ManagedObject.__init__(self, "SysdebugEp", parent_mo_or_dn, **kwargs)
| [
"test@cisco.com"
] | test@cisco.com |
a1640304c859f34680877002f692057f623b2b28 | cc2fcc1a0c5ea9789f98ec97614d7b25b03ba101 | /st2reactor/tests/unit/test_timer.py | 9b86ac90438b1ac657a18287adc6f29a57a8cc2d | [
"Apache-2.0"
] | permissive | Junsheng-Wu/st2 | 6451808da7de84798641882ca202c3d1688f8ba8 | c3cdf657f7008095f3c68b4132b9fe76d2f52d81 | refs/heads/master | 2022-04-30T21:32:44.039258 | 2020-03-03T07:03:57 | 2020-03-03T07:03:57 | 244,301,363 | 0 | 0 | Apache-2.0 | 2022-03-29T22:04:26 | 2020-03-02T06:53:58 | Python | UTF-8 | Python | false | false | 3,837 | py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import bson
import mock
from st2common.constants.triggers import TIMER_TRIGGER_TYPES
from st2common.models.db.trigger import TriggerDB
from st2common.models.system.common import ResourceReference
from st2common.persistence.trigger import TriggerType
from st2common.persistence.trigger import Trigger
from st2reactor.timer.base import St2Timer
from st2tests.base import CleanDbTestCase
class St2TimerTestCase(CleanDbTestCase):
    """Tests for St2Timer start-up registration and trigger dispatch.

    NOTE(review): this file uses Python 2 idioms (`assertItemsEqual`,
    subscripting `dict.keys()`); it predates a Python 3 port.
    """

    def test_trigger_types_are_registered_on_start(self):
        timer = St2Timer()
        # Stub the scheduler so start() does not launch real timer jobs.
        timer._scheduler = mock.Mock()
        # Verify there are no TriggerType in the db when we start
        self.assertItemsEqual(TriggerType.get_all(), [])
        timer.start()
        # Verify TriggerType objects have been created
        trigger_type_dbs = TriggerType.get_all()
        self.assertEqual(len(trigger_type_dbs), len(TIMER_TRIGGER_TYPES))
        timer_trigger_type_refs = TIMER_TRIGGER_TYPES.keys()
        # Every created TriggerType must correspond to a known timer type ref.
        for trigger_type in trigger_type_dbs:
            ref = ResourceReference(pack=trigger_type.pack, name=trigger_type.name).ref
            self.assertTrue(ref in timer_trigger_type_refs)

    def test_existing_rules_are_loaded_on_start(self):
        # Assert that we dispatch message for every existing Trigger object
        St2Timer._handle_create_trigger = mock.Mock()
        timer = St2Timer()
        timer._scheduler = mock.Mock()
        timer._trigger_watcher.run = mock.Mock()
        # Verify there are no Trigger and TriggerType in the db when we start
        self.assertItemsEqual(Trigger.get_all(), [])
        self.assertItemsEqual(TriggerType.get_all(), [])
        # Add a dummy timer Trigger object
        type_ = TIMER_TRIGGER_TYPES.keys()[0]
        parameters = {'unit': 'seconds', 'delta': 1000}
        trigger_db = TriggerDB(id=bson.ObjectId(), name='test_trigger_1', pack='dummy',
                               type=type_, parameters=parameters)
        trigger_db = Trigger.add_or_update(trigger_db)
        # Verify object has been added
        self.assertEqual(len(Trigger.get_all()), 1)
        timer.start()
        # Block until the watcher has loaded the existing triggers.
        timer._trigger_watcher._load_thread.wait()
        # Verify handlers are called
        timer._handle_create_trigger.assert_called_with(trigger_db)

    @mock.patch('st2common.transport.reactor.TriggerDispatcher.dispatch')
    def test_timer_trace_tag_creation(self, dispatch_mock):
        timer = St2Timer()
        timer._scheduler = mock.Mock()
        timer._trigger_watcher = mock.Mock()
        # Add a dummy timer Trigger object
        type_ = TIMER_TRIGGER_TYPES.keys()[0]
        parameters = {'unit': 'seconds', 'delta': 1}
        trigger_db = TriggerDB(name='test_trigger_1', pack='dummy', type=type_,
                               parameters=parameters)
        timer.add_trigger(trigger_db)
        timer._emit_trigger_instance(trigger=trigger_db.to_serializable_dict())
        # The dispatched trace tag must combine the trigger type name and
        # the trigger's own name.
        self.assertEqual(dispatch_mock.call_args[1]['trace_context'].trace_tag,
                         '%s-%s' % (TIMER_TRIGGER_TYPES[type_]['name'], trigger_db.name))
| [
"wei.ying@easystack.cn"
] | wei.ying@easystack.cn |
1d29463b299c6b95621395029b066915b15c3e86 | ff2fdb1e7388f74afcf477ceeeec16724a5f5db6 | /my_name/my_name/urls.py | 2c855a4e8efa6f485e3f23f88ade51e1fa21d994 | [] | no_license | asim3/ci-cd-example | fbdedb52a5f4666f1fbbab9d83800830007b4dec | c76cf7ae624364ee595baac6909dbf256af118e7 | refs/heads/master | 2023-05-06T14:44:05.881311 | 2021-06-06T11:48:59 | 2021-06-06T11:48:59 | 374,118,469 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 749 | py | """my_name URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
# Only the Django admin interface is routed; no app-specific URLs yet.
urlpatterns = [
    path('admin/', admin.site.urls),
]
| [
"asimwebapps@gmail.com"
] | asimwebapps@gmail.com |
617e130bde2ab5ea550cd079c100cdc889688e4a | b4cf3c5caacd99d0fb0b864f4ee9f30056a52c05 | /asynch/proto/streams/compressed.py | 30d1327a05f980c8e4c0c56664a2adc3678e37c2 | [
"Apache-2.0"
] | permissive | dbrojas/asynch | 4376ca20e15897e0efe4345402d5d5af3a7c1212 | 94054ba4acb9f0d05ddedf5ae66278b5e5301fdd | refs/heads/master | 2023-03-12T03:24:42.176643 | 2021-02-24T02:18:11 | 2021-02-24T02:18:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,515 | py | from clickhouse_cityhash.cityhash import CityHash128
from asynch.proto.compression import get_decompressor_cls
from asynch.proto.context import Context
from asynch.proto.io import BufferedReader, BufferedWriter
from asynch.proto.streams.native import BlockInputStream, BlockOutputStream
class CompressedBlockOutputStream(BlockOutputStream):
    """Native-protocol block output stream that compresses the serialized
    block and writes the CityHash128-prefixed compressed payload to the
    underlying writer in chunks of at most ``compress_block_size`` bytes."""

    def __init__(
        self,
        reader: BufferedReader,
        writer: BufferedWriter,
        context: Context,
        compressor_cls,
        compress_block_size,
    ):
        super().__init__(reader, writer, context)
        self.compressor_cls = compressor_cls
        self.compress_block_size = compress_block_size
        self.compressor = self.compressor_cls(writer)

    def get_compressed_hash(self, data):
        # Checksum the ClickHouse server verifies for each compressed frame.
        return CityHash128(data)

    # BUGFIX: `finalize` and `get_compressed` were plain `def`s whose bodies
    # used `await`, which is a SyntaxError; both must be coroutines, and
    # `get_compressed()` must be awaited.
    async def finalize(self):
        await self.writer.flush()
        compressed = await self.get_compressed()
        compressed_size = len(compressed)
        compressed_hash = self.get_compressed_hash(compressed)
        await self.writer.write_uint128(compressed_hash,)
        # Emit the compressed payload in chunks of at most block_size bytes.
        block_size = self.compress_block_size
        i = 0
        while i < compressed_size:
            await self.writer.write_bytes(compressed[i : i + block_size])  # noqa: E203
            i += block_size
        await self.writer.flush()

    async def get_compressed(self):
        # Assemble optional method byte + compressed data into a fresh buffer.
        compressed = BufferedWriter()
        if self.compressor.method_byte is not None:
            await compressed.write_uint8(self.compressor.method_byte)
            extra_header_size = 1  # method
        else:
            extra_header_size = 0
        data = self.compressor.get_compressed_data(extra_header_size)
        await compressed.write_bytes(data)
        return compressed.buffer
class CompressedBlockInputStream(BlockInputStream):
    # Native-protocol block input stream that reads compressed frames:
    # a CityHash128 checksum, a compression-method byte, then the payload.
    def __init__(self, reader: BufferedReader, writer: BufferedWriter, context):
        super().__init__(reader, writer, context)

    def get_compressed_hash(self, data):
        # Checksum algorithm matching the one the server wrote per frame.
        return CityHash128(data)

    async def read_block(self):
        compressed_hash = await self.reader.read_uint128()
        method_byte = await self.reader.read_uint8()
        # Pick the decompressor that matches the frame's method byte.
        decompressor_cls = get_decompressor_cls(method_byte)
        decompressor = decompressor_cls(self.reader)
        if decompressor.method_byte is not None:
            extra_header_size = 1  # method
        else:
            extra_header_size = 0
        return decompressor.get_decompressed_data(method_byte, compressed_hash, extra_header_size)
| [
"long2ice@gmail.com"
] | long2ice@gmail.com |
5b5238cf9854e75b0e44eae065192938b972503d | 95e9ec4b3b0d86063da53a0e62e138cf794cce3a | /python/Django/20190606/press/manage.py | e01e47d4da921d436af23f56b218d8ad7ab18fc9 | [] | no_license | wjl626nice/1902 | c3d350d91925a01628c9402cbceb32ebf812e43c | 5a1a6dd59cdd903563389fa7c73a283e8657d731 | refs/heads/master | 2023-01-05T23:51:47.667675 | 2019-08-19T06:42:09 | 2019-08-19T06:42:09 | 180,686,044 | 4 | 1 | null | 2023-01-04T07:35:24 | 2019-04-11T00:46:43 | Python | UTF-8 | Python | false | false | 625 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run Django administrative tasks for the `press` project."""
    # Point Django at the project settings unless the caller already set them.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'press.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Hand the raw argv (e.g. runserver, migrate) to Django's dispatcher.
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| [
"18537160262@qq.com"
] | 18537160262@qq.com |
49540f1af5bee2a45784e914d60ebd789c1f3138 | 82c6dedfe9040b453c22c3f93f1a2c9a922c988b | /ClusterFind/cluster_dbscan_pyclustering.py | ad11d9af82948f325cf54160c74697b4323c2fbf | [] | no_license | njcuk9999/g_clustering | 8d34439fd78ef7017c0414c932d21cd19fc6551c | 20e6a6ab17c72c5652ae33125f7dabf4131aa8d5 | refs/heads/master | 2021-05-11T16:10:41.382938 | 2018-05-08T22:55:03 | 2018-05-08T22:55:03 | 117,753,516 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,410 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
# CODE NAME HERE
# CODE DESCRIPTION HERE
Created on 2018-01-16 at 14:13
@author: cook
Version 0.0.0
"""
import numpy as np
from astropy.table import Table
import random
import matplotlib.pyplot as plt
from pyclustering.cluster import cluster_visualizer
from pyclustering.cluster.dbscan import dbscan
from pyclustering.utils import read_sample
from pyclustering.utils import timedcall
from pyclustering.samples.definitions import SIMPLE_SAMPLES, FCPS_SAMPLES
# =============================================================================
# Define variables
# =============================================================================
# Define paths
WORKSPACE = '/scratch/Projects/Gaia_clustering'
WRITEPATH = WORKSPACE + '/data/Sim/Simulation_simple.fits'
# -----------------------------------------------------------------------------
COLOURS = ['r', 'g', 'b', 'c', 'm', 'orange']
MARKERS = ['o', 's', '*', 'd', 'v', '<', '>', '^', 'h', 'D', 'p', '8']
SUBSET = True
SUBSETSIZE = 100000
DIMNAMES = ['X [pc]', 'Y [pc]', 'Z [pc]',
'U [mas/yr]', 'V [mas/yr]', 'W [mas/yr]']
# =============================================================================
# Define functions
# =============================================================================
def get_random_choices(array, num):
mask = random.choices(range(len(array)), k=num)
return mask
def optimal_grid(num):
# get maximum shape
shape = int(np.ceil(np.sqrt(num)))
# get number of rows and columns based on maximum shape
if shape ** 2 == num:
nrows = shape
ncols = shape
else:
nrows = int(np.ceil(num / shape))
ncols = int(np.ceil(num / nrows))
# get position of figures
pos = []
for i in range(nrows):
for j in range(ncols):
pos.append([i, j])
# return nrows, ncols and positions
return nrows, ncols, pos
def plot_selection(data, clusters, noise):
plt.scatter(data[:, 0], data[:, 1], marker='.')
clustermask = get_mask(clusters)
noisemask = np.array(noise)
plt.scatter(data[:, 0][noisemask], data[:, 1][noisemask],
color='k', marker='.', s=1)
plt.scatter(data[:, 0][clustermask], data[:, 1][clustermask],
color='r', marker='x')
plt.show()
plt.close()
def get_mask(ll):
mask = []
for l in range(len(ll)):
mask = np.append(mask, ll[l])
mask = np.array(mask, dtype=int)
return mask
# =============================================================================
# Start of code
# =============================================================================
# Main code here
if __name__ == "__main__":
# get the data
print("Loading data...")
rawdata = Table.read(WRITEPATH)
# apply subset to data
if SUBSET:
mask = get_random_choices(rawdata, SUBSETSIZE)
else:
mask = np.ones(len(rawdata['X']), dtype=bool)
rawdata = rawdata[mask]
# construct data matrix
data = np.array([rawdata['X'], rawdata['Y'], rawdata['Z'],
rawdata['U'], rawdata['V'], rawdata['W']]).T
# data = np.array([rawdata['X'], rawdata['Y'], rawdata['Z']]).T
datalist = []
for row in range(data.shape[0]):
datalist.append(list(data[row]))
# get the true labels and group names
labels_true = np.array(rawdata['row'])
groups = np.array(rawdata['group'])
# convert data to 32 bit
data = np.array(data, dtype=np.float32)
# ----------------------------------------------------------------------
# DBscan example from :
# scikit-learn.org/stable/modules/clustering.html#dbscan
# http://scikit-learn.org/stable/auto_examples/cluster/plot_dbscan
# .html#sphx-glr-auto-examples-cluster-plot-dbscan-py
print("Calculating clustering using 'DBSCAN (pyclustering)'...")
dbscan_instance = dbscan(data=datalist, eps=10, neighbors=10, ccore=True)
(ticks, _) = timedcall(dbscan_instance.process)
print("\t\tExecution time: ", ticks, "\n")
clusters = dbscan_instance.get_clusters()
noise = dbscan_instance.get_noise()
plot_selection(data, clusters, noise)
# =============================================================================
# End of code
# =============================================================================
| [
"neil.james.cook@gmail.com"
] | neil.james.cook@gmail.com |
c739f531d187d8de6de5e69205382a8f417ae64b | 1287bbb696e240dd0b92d56d4fdf4246370f3e14 | /_os.py | c72f9cdf864f39a229eeb2990576289335ca9bf5 | [] | no_license | omerfarukcelenk/PythonCalismalari | ed0c204084860fddcb892e6edad84fdbc1ed38ec | 28da12d7d042ec306f064fb1cc3a1a026cb57b74 | refs/heads/main | 2023-04-13T18:23:15.270020 | 2021-04-26T21:06:21 | 2021-04-26T21:06:21 | 361,893,918 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,288 | py | import os
import datetime
# Scratch pad exploring the `os` module: each assignment overwrites `result`,
# so only the value of the LAST uncommented assignment is printed at the end.
result = dir(os)
result = os.name
# os.chdir("C:\\")
# os.chdir("../..")
# result = os.getcwd()
# creating directories
# os.mkdir("newdirectory")
# os.makedirs("newdirectory/yeniklasör")
# listing directory contents
# result = os.listdir()
# result = os.listdir("C:\\")
# for dosya in os.listdir():
#     if dosya.endswith(".py"):
#         print(dosya)
# result = os.stat("date.py")
# result = result.st_size/1024
# result = datetime.datetime.fromtimestamp(result.st_ctime) # creation time
# result = datetime.datetime.fromtimestamp(result.st_atime) # last access time
# result = datetime.datetime.fromtimestamp(result.st_mtime) # last modification time
# os.system("notepad.exe")
# os.rename("newdirectory","DENEMEklasörü")
# os.rmdir("silmelik")
# os.removedirs("DENEMEklasörü/yeniklasör")
# path helpers
result = os.path.abspath("_os.py")
result = os.path.dirname("D:/Python/_os.py")
result = os.path.dirname(os.path.abspath("_os.py"))
result = os.path.exists("_os.py")
result = os.path.isdir("D:/Python/_os.py")
result = os.path.isfile("D:/Python/_os.py")
result = os.path.join("C://","deneme","deneme1")
result = os.path.split("C://deneme")
result = os.path.splitext("_os.py")
# result = result[0]
result = result[1]
# prints the extension of "_os.py", i.e. ".py"
print(result)
"omerfar0133@gmail.com"
] | omerfar0133@gmail.com |
aae46ef36c39093f24810743f424bc35211d8e81 | f8da830331428a8e1bbeadf23345f79f1750bd98 | /msgraph-cli-extensions/beta/cloudcommunications_beta/azext_cloudcommunications_beta/vendored_sdks/cloudcommunications/aio/__init__.py | 393cf67a0e7bf2de6c16401ff7377af50235e00c | [
"MIT"
] | permissive | ezkemboi/msgraph-cli | e023e1b7589461a738e42cbad691d9a0216b0779 | 2ceeb27acabf7cfa219c8a20238d8c7411b9e782 | refs/heads/main | 2023-02-12T13:45:03.402672 | 2021-01-07T11:33:54 | 2021-01-07T11:33:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 563 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._cloud_communications_async import CloudCommunications
__all__ = ['CloudCommunications']
| [
"japhethobalak@gmail.com"
] | japhethobalak@gmail.com |
c3c1c5c2e9bb7f825bf97947cb99b00c311d398b | 4c117ea3617a576ddd07d8ea8aaab1a925fc402f | /bin/Race/Statistic/CompareRace.py | de1aa74809d77348fa72ce283a8cc5be725edc2e | [] | no_license | 452990729/Rep-seq | 7be6058ba3284bea81282f2db7fd3bd7895173ba | e217b115791e0aba064b2426e4502a5c1b032a94 | refs/heads/master | 2021-12-11T14:27:46.912144 | 2019-06-04T03:49:40 | 2019-06-04T03:49:40 | 190,124,555 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,950 | py | #!/usr/bin/env python
import sys
import re
import os
import numpy as np
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
from scipy import stats
def ReadTab(file_in):
    """Read a whitespace-separated count table and normalize it.

    Each line is "<field> ... <field> <count> <extra>"; all fields except
    the last two are joined with '|' to form the clonotype key, and the
    second-to-last field is its count.

    Returns (frequencies, label): a dict mapping key -> relative frequency
    rounded to 4 decimals, and the sample label (file basename up to the
    first dot).
    """
    counts = {}
    label = os.path.basename(file_in).split('.')[0]
    with open(file_in, 'r') as in1:
        for line in in1:
            fields = line.split()
            # Skip blank/malformed lines; indexing fields[-2] on them
            # previously raised IndexError.
            if len(fields) < 2:
                continue
            counts['|'.join(fields[:-2])] = int(fields[-2])
    total = sum(counts.values())
    return {key: round(float(value) / total, 4)
            for key, value in counts.items()}, label
def ScatterPlot(dict1, dict2, label1, label2, tp, outpath):
    """Scatter-plot the frequencies of two samples against each other.

    dict1/dict2 map clonotype keys to relative frequencies; keys missing
    from one sample plot at 0 on that axis. A least-squares fit line and
    the coefficient of determination are drawn, and the figure is saved
    under `outpath`. `tp` is the segment type shown in the title.
    """
    fig, ax = plt.subplots()
    keys = set(dict1.keys()) | set(dict2.keys())
    x = np.zeros(len(keys))
    y = np.zeros(len(keys))
    colors = np.random.rand(len(keys))
    i = 0
    for key in keys:
        if key in dict1:
            x[i] = dict1[key]
        if key in dict2:
            y[i] = dict2[key]
        i += 1
    ax.scatter(x, y, c=colors, alpha=0.5)
    slope, intercept, r_value, p_value, std_err = stats.linregress(x, y)
    # Axis limit: largest observed frequency rounded up to the next 0.01.
    base = (int(max(x.max(), y.max())*100)+1)*0.01
    if tp != 'VDJ':
        # Label clonotypes far off the diagonal (same iteration order as the
        # fill loop above: the set is not modified in between).
        for i, txt in enumerate(keys):
            if abs(x[i]-y[i]) >= 0.2*base:
                ax.annotate(txt, (x[i], y[i]))
    # BUGFIX: linregress returns r; the annotation claims R^2, so square it.
    ax.annotate('$R^{2}$ = '+str(round(r_value**2, 3)), (0.3*base, 0.9*base))
    m, b = np.polyfit(x, y, 1)
    ax.plot(x, m*x + b, '-')
    ax.set_xlim(-0.001, base)
    ax.set_ylim(-0.001, base)
    ax.set_xlabel(label1)
    ax.set_ylabel(label2)
    ax.set_title('Comparison of {} between {} and {}'.format(tp, label1, label2))
    plt.savefig(os.path.join(
        outpath, 'ComparisonOf{}Between{}and{}.png'.format(tp, label1, label2)))
def main():
    # Usage: CompareRace.py <type (e.g. V, J, VDJ)> <table1> <table2> <output_dir>
    dict1, label1 = ReadTab(sys.argv[2])
    dict2, label2 = ReadTab(sys.argv[3])
    ScatterPlot(dict1, dict2, label1, label2, sys.argv[1], sys.argv[4])

if __name__ == '__main__':
    main()
| [
"452990729@qq.com"
] | 452990729@qq.com |
21b5f6abaa58f11530183e3547fa53284d2b8c4d | 3b3ca3b587b4ed9181038444b03bf526d4b14346 | /axelerate/networks/yolo/backend/utils/box.py | 0c31ccf5ed52e3d6cbaccf53ad3a897ef663e481 | [
"MIT"
] | permissive | ujuo/aXeleRate | fd217cc539ed31b43635e989c2e2ce9bb12f8486 | 2ff08e314e5e56becbaaebb28b0181f90fed5f5e | refs/heads/master | 2023-07-07T18:29:39.994544 | 2021-08-12T09:12:27 | 2021-08-12T09:12:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,064 | py | import numpy as np
import cv2
class BoundBox:
    """Axis-aligned box in centroid form (center x/y, width, height),
    optionally carrying an objectness score ``c`` and per-class scores."""

    def __init__(self, x, y, w, h, c = None, classes = None):
        self.x, self.y = x, y
        self.w, self.h = w, h
        self.c = c
        self.classes = classes

    def get_label(self):
        """Index of the highest-scoring class."""
        return np.argmax(self.classes)

    def get_score(self):
        """Score of the highest-scoring class."""
        return self.classes[self.get_label()]

    def iou(self, bound_box):
        """Intersection-over-union with another BoundBox."""
        return centroid_box_iou(self.as_centroid(), bound_box.as_centroid())

    def as_centroid(self):
        """This box as a numpy array [x, y, w, h]."""
        return np.array([self.x, self.y, self.w, self.h])
def boxes_to_array(bound_boxes):
    """
    # Args
        bound_boxes : list of BoundBox instances
    # Returns
        centroid_boxes : (N, 4)
        probs : (N, nb_classes)
    """
    centroid_boxes = np.array([[box.x, box.y, box.w, box.h]
                               for box in bound_boxes])
    probs = np.array([box.classes for box in bound_boxes])
    return centroid_boxes, probs
def nms_boxes(boxes, n_classes, nms_threshold=0.3, obj_threshold=0.3):
    """
    # Args
        boxes : list of BoundBox
    # Returns
        boxes : list of BoundBox
            non maximum supressed BoundBox instances

    NOTE: the per-class scores of suppressed boxes are zeroed IN PLACE,
    so callers should not rely on the input boxes' scores afterwards.
    """
    # suppress non-maximal boxes
    for c in range(n_classes):
        # Indices of the boxes ordered by descending score for class c.
        sorted_indices = list(reversed(np.argsort([box.classes[c] for box in boxes])))
        for i in range(len(sorted_indices)):
            index_i = sorted_indices[i]
            if boxes[index_i].classes[c] == 0:
                # Already suppressed by a higher-scoring box.
                continue
            else:
                # Zero out every lower-scoring box that overlaps too much.
                for j in range(i+1, len(sorted_indices)):
                    index_j = sorted_indices[j]
                    if boxes[index_i].iou(boxes[index_j]) >= nms_threshold:
                        boxes[index_j].classes[c] = 0
    # remove the boxes which are less likely than a obj_threshold
    boxes = [box for box in boxes if box.get_score() > obj_threshold]
    return boxes
def draw_scaled_boxes(image, boxes, probs, labels, desired_size=400):
    """Upscale small images to roughly `desired_size` before drawing boxes.

    # Args
        image : BGR image array
        boxes : (N, 4) minmax boxes (may be an empty list/array)
        probs : (N, nb_classes) class scores
        labels : list of class names
    # Returns
        annotated, scaled copy of the image
    """
    img_size = min(image.shape[:2])
    # Only upscale; large images are drawn at their native size.
    if img_size < desired_size:
        scale_factor = float(desired_size) / img_size
    else:
        scale_factor = 1.0

    h, w = image.shape[:2]
    img_scaled = cv2.resize(image, (int(w*scale_factor), int(h*scale_factor)))
    # BUGFIX: `boxes != []` is elementwise (ambiguous) for numpy arrays;
    # test the length instead. `np.int` was removed in NumPy 1.24, so use
    # the builtin int dtype.
    if len(boxes) > 0:
        boxes_scaled = (boxes * scale_factor).astype(int)
    else:
        boxes_scaled = boxes
    return draw_boxes(img_scaled, boxes_scaled, probs, labels)
def draw_boxes(image, boxes, probs, labels):
    """Draw minmax boxes with '<label>: <score>' captions on `image` (in place)."""
    for box, classes in zip(boxes, probs):
        x1, y1, x2, y2 = box
        # Green rectangle around the detection.
        cv2.rectangle(image, (x1,y1), (x2,y2), (0,255,0), 3)
        # Red caption just above the box; font size scales with image height.
        cv2.putText(image,
                    '{}: {:.2f}'.format(labels[np.argmax(classes)], classes.max()),
                    (x1, y1 - 13),
                    cv2.FONT_HERSHEY_SIMPLEX,
                    1e-3 * image.shape[0],
                    (0,0,255), 1)
    return image
def centroid_box_iou(box1, box2):
    """IoU of two centroid-format boxes [cx, cy, w, h]."""
    def _overlap_1d(a, b):
        # Length of the overlap of two 1-D intervals (0 when disjoint).
        a_lo, a_hi = a
        b_lo, b_hi = b
        if b_lo < a_lo:
            if b_hi < a_lo:
                return 0
            return min(a_hi, b_hi) - a_lo
        if a_hi < b_lo:
            return 0
        return min(a_hi, b_hi) - b_lo

    _, _, w1, h1 = box1.reshape(-1,)
    _, _, w2, h2 = box2.reshape(-1,)
    x1_min, y1_min, x1_max, y1_max = to_minmax(box1.reshape(-1, 4)).reshape(-1,)
    x2_min, y2_min, x2_max, y2_max = to_minmax(box2.reshape(-1, 4)).reshape(-1,)

    intersect = (_overlap_1d((x1_min, x1_max), (x2_min, x2_max))
                 * _overlap_1d((y1_min, y1_max), (y2_min, y2_max)))
    union = w1 * h1 + w2 * h2 - intersect
    return float(intersect) / union
def to_centroid(minmax_boxes):
    """Convert (N, 4) minmax boxes [x1, y1, x2, y2] to centroid form
    [cx, cy, w, h].

    e.g. [[100, 120, 140, 200]] -> [[120., 160., 40., 80.]]
    """
    # BUGFIX: `np.float` was removed in NumPy 1.24; builtin float is the
    # documented equivalent (float64).
    minmax_boxes = minmax_boxes.astype(float)
    centroid_boxes = np.zeros_like(minmax_boxes)
    x1 = minmax_boxes[:,0]
    y1 = minmax_boxes[:,1]
    x2 = minmax_boxes[:,2]
    y2 = minmax_boxes[:,3]
    centroid_boxes[:,0] = (x1 + x2) / 2
    centroid_boxes[:,1] = (y1 + y2) / 2
    centroid_boxes[:,2] = x2 - x1
    centroid_boxes[:,3] = y2 - y1
    return centroid_boxes
def to_minmax(centroid_boxes):
    """Convert (N, 4) centroid boxes [cx, cy, w, h] to minmax form
    [x1, y1, x2, y2] (inverse of to_centroid)."""
    # BUGFIX: `np.float` was removed in NumPy 1.24; builtin float is the
    # documented equivalent (float64).
    centroid_boxes = centroid_boxes.astype(float)
    minmax_boxes = np.zeros_like(centroid_boxes)
    cx = centroid_boxes[:,0]
    cy = centroid_boxes[:,1]
    w = centroid_boxes[:,2]
    h = centroid_boxes[:,3]
    minmax_boxes[:,0] = cx - w/2
    minmax_boxes[:,1] = cy - h/2
    minmax_boxes[:,2] = cx + w/2
    minmax_boxes[:,3] = cy + h/2
    return minmax_boxes
def create_anchor_boxes(anchors):
    """
    # Args
        anchors : flat list of floats [w0, h0, w1, h1, ...]
    # Returns
        boxes : array, shape of (len(anchors)/2, 4)
            centroid-type boxes anchored at the origin
    """
    width_height_pairs = zip(anchors[0::2], anchors[1::2])
    return np.array([np.array([0, 0, w, h]) for w, h in width_height_pairs])
def find_match_box(centroid_box, centroid_boxes):
    """Find the index of the boxes with the largest overlap among the N-boxes.
    # Args
        centroid_box : array, shape of (1, 4)
        centroid_boxes : array, shape of (N, 4)
    # Return
        match_index : int
            index of the best-overlapping box, or -1 for empty input
    """
    best_index = -1
    best_iou = -1
    for idx, candidate in enumerate(centroid_boxes):
        overlap = centroid_box_iou(centroid_box, candidate)
        if overlap > best_iou:
            best_index, best_iou = idx, overlap
    return best_index
| [
"dmitrywat@gmail.com"
] | dmitrywat@gmail.com |
6a750ce82b607d3b695603bebe56a278f9cb3f0c | 5c80c1c3a24399db5d7c2a259a3e2d18dcbe79a2 | /central/habana_model_yaml_config.py | 0c55efe4e3aab6954cf696738acc3d3b9508d17c | [] | no_license | maxchung2001/Model-References | b2f26cec3bcfc912f50379e47fcff7cb60ea96d2 | bc8da16830c1c35e5d1458ba2e46df8726e10f29 | refs/heads/master | 2023-04-12T16:37:27.103316 | 2021-04-22T07:00:01 | 2021-04-22T07:00:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,550 | py | ###############################################################################
# Copyright (C) 2020-2021 Habana Labs, Ltd. an Intel Company
###############################################################################
import os
from typing import Dict, List
import yaml
from central.habana_model_runner_utils import get_canonical_path
# Derive from this class for model-specific handling of yaml config file parameters
class HabanaModelYamlConfig():
    """Parses a model's yaml config file and exposes its entries as
    environment variables and command-line parameters.

    Derive from this class for model-specific handling of yaml parameters.
    """

    # Workers spawned per HLS box when 'use_horovod' is set but
    # 'num_workers_per_hls' is not greater than 1.
    default_num_workers_per_hls = 8

    def __init__(self, modelname, filename):
        """Validate that `filename` exists, parse it, and force TF_DISABLE_MKL."""
        self.model = modelname
        self.hb_config = filename
        self.parsed_config = None
        self.env_variables = None
        self.model_parameters = None
        self.model_parameters_store_true = None
        config_path = get_canonical_path(self.hb_config)
        if config_path.is_file() is False:
            raise OSError(f"hb_config has to be existing yaml file, but there is no file {config_path}")
        self.process_config_file(config_path)
        self.add_env_vars({'TF_DISABLE_MKL': '1'})

    def process_config_file(self, config_path):
        """Load the yaml file and split it into env vars, parameters and
        store_true flags; verifies the 'model' field matches this model."""
        try:
            with open(config_path, "r") as hb_config:
                # safe_load: the configs are plain data; yaml.load without an
                # explicit Loader is deprecated and can construct arbitrary
                # Python objects.
                self.parsed_config = yaml.safe_load(hb_config)
                print(self.parsed_config['model'])
                print(self.parsed_config['env_variables'])
                print(self.parsed_config['parameters'])
                if self.model != self.parsed_config['model']:
                    raise Exception(f"{self.hb_config} has a different model name \"{self.parsed_config['model']}\" than \"{self.model}\"")
                self.env_variables = self.parsed_config['env_variables'] \
                    if self.parsed_config['env_variables'] is not None else {}
                self.model_parameters = self.parsed_config['parameters']
                # 'store_true' entries become bare flags (no =value suffix).
                if self.model_parameters.get('store_true'):
                    self.model_parameters_store_true = self.model_parameters.pop('store_true')
        except Exception as exc:
            raise RuntimeError(f"Error in {self.__class__.__name__} process_config_file()") from exc

    def get_env_vars(self):
        """Dict of environment variables declared by the config."""
        return self.env_variables

    def add_env_vars(self, new_vars: dict):
        """Merge `new_vars` into the environment variables (overwrites)."""
        self.env_variables.update(new_vars)

    def get_parameters(self):
        """The raw 'parameters' mapping from the config."""
        return self.model_parameters

    def get_parameter(self, key):
        """Top-level parameter lookup; None when absent or config unparsed."""
        if isinstance(self.model_parameters, dict):
            return self.model_parameters.get(key)
        return None

    # Derived classes can override how they want yaml sequences that contain
    # list specifications, to be de-serialized
    def add_list_elem(self, key, list_value, command: List[str]):
        """Serialize a yaml list as a single comma-joined '--key=v1,v2' option."""
        value = ",".join(str(elem) for elem in list_value)
        command.extend([f"--{key}={value}"])

    # Adds leaf-level yaml config nodes as command-line options
    def add_hier_record(self, record_key, record_value, command: List[str]):
        """Recursively flatten a (possibly nested) record into '--key=value'
        options; dicts recurse, lists are comma-joined, scalars emit directly."""
        if record_value is not None:
            if isinstance(record_value, dict):
                for key, value in record_value.items():
                    self.add_hier_record(key, value, command)
            elif isinstance(record_value, list):
                self.add_list_elem(record_key, record_value, command)
            else:
                command.extend([f"--{record_key}={record_value}"])

    def traverse(self, record_key, record_value, command: List[str], fields):
        """Walk one record, descending along the '/'-separated `fields` path.

        Records matching the current path component are followed deeper; once
        the path is exhausted (or the record is off the path but non-None),
        the record is flattened with add_hier_record.
        """
        if fields != []:
            if record_key == fields[0]:
                if len(fields) > 1:
                    fields = fields[1:]
                    if record_value[fields[0]] is not None:
                        if isinstance(record_value[fields[0]], dict) == False:
                            if len(fields) > 1:
                                raise Exception(f"Error in {self.__class__.__name__} traverse(): specified filter does not match yaml config")
                            else:
                                command.extend([f"--{fields[0]}={record_value[fields[0]]}"])
                        else:
                            for key, value in record_value[fields[0]].items():
                                if value is not None:
                                    rfields = fields[1:]
                                    self.traverse(key, value, command, rfields)
                elif record_value is not None:
                    self.add_hier_record(record_key, record_value, command)
            elif record_value is not None:
                self.add_hier_record(record_key, record_value, command)
        elif record_value is not None:
            self.add_hier_record(record_key, record_value, command)

    # Adds all parameters, with the exception of records that don't pass the
    # filter, and top-level excluded keys. `filter` is a "/" separated field
    # specifier to zoom into a particular record in the yaml config; all
    # records that the search resolves to are added.
    def add_parameters_with_filter(self, command: List[str], filter: str, exclude_fields: List[str]=None):
        try:
            if self.model_parameters is not None:
                fields = filter.split('/')
                for key, value in self.model_parameters.items():
                    if key not in exclude_fields and value is not None:
                        self.traverse(key, value, command, fields)
            if self.model_parameters_store_true is not None:
                for item in self.model_parameters_store_true:
                    command.extend([f"--{item}"])
        except Exception as exc:
            raise RuntimeError(f"Error in {self.__class__.__name__} add_parameters_with_filter()") from exc

    # Derived classes can override how 'use_horovod' along with
    # 'num_workers_per_hls' is passed to the training script
    def add_horovod_parameter(self, command: List[str]):
        """Emit '--use_horovod=<workers>' when horovod is enabled.

        BUGFIX: `default_num_workers_per_hls` is a class attribute; it was
        referenced as a bare name, raising NameError whenever
        num_workers_per_hls <= 1.
        """
        try:
            key = 'use_horovod'
            if self.model_parameters[key] == True:
                if self.model_parameters['num_workers_per_hls'] > 1:
                    command.extend([f"--{key}={self.model_parameters['num_workers_per_hls']}"])
                else:
                    command.extend([f"--{key}={self.default_num_workers_per_hls}"])
        except Exception as exc:
            raise RuntimeError(f"Error in {self.__class__.__name__} add_horovod_parameter()") from exc

    # Derive from HabanaModelYamlConfig to override
    def add_all_parameters(self, command: List[str]):
        """Flatten every parameter into command-line options; 'use_horovod'
        gets the worker-count treatment and 'num_workers_per_hls' is skipped."""
        try:
            if self.model_parameters is not None:
                for key, value in self.model_parameters.items():
                    if key == 'use_horovod':
                        if value == True:
                            if self.model_parameters['num_workers_per_hls'] > 1:
                                command.extend([f"--{key}={self.model_parameters['num_workers_per_hls']}"])
                            else:
                                # BUGFIX: was a bare name (NameError); it is a
                                # class attribute.
                                command.extend([f"--{key}={self.default_num_workers_per_hls}"])
                        continue
                    if key == 'num_workers_per_hls':
                        continue
                    if value is not None:
                        self.add_hier_record(key, value, command)
            if self.model_parameters_store_true is not None:
                for item in self.model_parameters_store_true:
                    command.extend([f"--{item}"])
        except Exception as exc:
            raise RuntimeError(f"Error in {self.__class__.__name__} add_all_parameters()") from exc

    # 'include_fields' is a "/" separated field specifier to zoom into a
    # particular record in the yaml config; all records that this search
    # resolves to will be added.
    def add_specific_parameters(self, command: List[str], include_fields: str):
        try:
            if self.model_parameters is not None:
                fields = include_fields.split('/')
                record = self.model_parameters
                for field in fields:
                    record = record[field]
                for key, value in record.items():
                    if value is not None:
                        self.add_hier_record(key, value, command)
            if self.model_parameters_store_true is not None:
                for item in self.model_parameters_store_true:
                    command.extend([f"--{item}"])
        except Exception as exc:
            raise RuntimeError(f"Error in {self.__class__.__name__} add_specific_parameters()") from exc

    # exclude_fields is a list of fields to skip at the top-most parameter level
    def add_parameters_except(self, command: List[str], exclude_fields: List[str]):
        try:
            if self.model_parameters is not None:
                for key, value in self.model_parameters.items():
                    if key not in exclude_fields:
                        if value is not None:
                            self.add_hier_record(key, value, command)
            if self.model_parameters_store_true is not None:
                for item in self.model_parameters_store_true:
                    command.extend([f"--{item}"])
        except Exception as exc:
            raise RuntimeError(f"Error in {self.__class__.__name__} add_parameters_except()") from exc
| [
"mpandit@habana.ai"
] | mpandit@habana.ai |
614e672660a0abfa0f13cfb8d422c9619c42d2c0 | acfd30f536de2b36daf3a0e7d7d8ebf9df1d7b31 | /ml_techniques/svm.py | f6ed5542344609d419b57b661af67a6caab169c9 | [
"MIT"
] | permissive | tgquintela/ml_techniques_in_python | 98c53735bf532ef641f55a0c0f44e295d55de302 | 242ff12b32edb313c983394f3153a8b7eb8ce183 | refs/heads/master | 2021-01-16T17:39:01.423695 | 2017-08-11T09:02:08 | 2017-08-11T09:02:08 | 100,012,254 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,887 | py |
import numpy as np
import time
import os
class Solver(object):
    """Base class holding training bookkeeping (metric histories, timings)
    and result reporting shared by the concrete solvers."""

    def report_results(self):
        """Print a summary report for the epoch with the best test accuracy.

        Requires fit() to have been run with test data so the test history
        is populated; otherwise a short notice is printed instead.
        """
        pathfile = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                'extra/templates/report_svm_template')
        with open(pathfile, 'r') as fh:
            report = fh.read()
        # The criteria of get the accuracy will be the accuracies in the epoch
        # with better test accuracy.
        if not self.test_accuracy_history:
            # BUGFIX: the format() call below used names defined only inside
            # this branch, raising NameError whenever no test history existed.
            print("No test history available: run fit() with test data "
                  "before calling report_results().")
            return
        best_i = np.argmax(self.test_accuracy_history)
        train_accuracy = self.train_accuracy_history[best_i]
        test_accuracy = self.test_accuracy_history[best_i]
        reg_pars = self.regularizer.parameters
        report = report.format(self.learning_rate, reg_pars,
                               self.batch_size, self.fit_time,
                               self.epoch_learned, best_i,
                               train_accuracy, test_accuracy)
        print(report)

    def _reset_history(self):
        """Clear counters and (when history tracking is on) metric lists."""
        self.epoch_learned = 0
        self.fit_time = 0.
        self.train_loss_history = None
        self.test_loss_history = None
        self.train_accuracy_history = None
        self.test_accuracy_history = None
        if self.history:
            self.train_loss_history = []
            self.train_accuracy_history = []
            self.test_loss_history = []
            self.test_accuracy_history = []

    def _add_epoch_to_history(self, train_loss, train_accuracy,
                              test_loss=None, test_accuracy=None):
        """Append one epoch's metrics; no-op when history tracking is off."""
        if self.history:
            self.train_loss_history.append(train_loss)
            self.train_accuracy_history.append(train_accuracy)
            if test_loss is not None:
                self.test_loss_history.append(test_loss)
                self.test_accuracy_history.append(test_accuracy)
class SGD(Solver):
    """Mini-batch stochastic gradient descent solver for linear models.

    Wires together a loss function, a model and a regularizer (created from
    their string names or passed as instances/classes) and trains with SGD,
    optionally tracking train/test metrics per epoch.
    """

    def __init__(self, loss='Hinge', loss_pars=1.0, model='svm',
                 model_pars=None, regularizer='null', reg_pars=1.,
                 batch_size=10, n_epochs=0, learning_rate=0.0001,
                 stop_step=.000001, history=True, verbose=False):
        ## Definition of complementary elements
        self.lossf = LossFunction.create_lossfunction(loss, loss_pars)
        self.model = Model.create_model(model, model_pars)
        self.regularizer = Regularization.create_regularization(regularizer,
                                                                reg_pars)
        ## Optimizer parameters
        self.optimizer = 'SGD'
        self.n_epochs = n_epochs
        self.batch_size = batch_size
        self.learning_rate = learning_rate
        self.stop_step = stop_step
        ## Tracking results
        self.last_loss = 1e16
        self.change_loss = 1e16
        self.history = history
        self._reset_history()

    def predict(self, X):
        """Raw (unthresholded) model output for X."""
        return self.model.compute(X)

    def score(self, X, y):
        """Scoring with accuracy measure by default."""
        y_pred = self.predict(X)
        return accuracy(y_pred, y)

    def fit(self, X_train, y_train, X_test=None, y_test=None):
        """Train with mini-batch SGD; optionally track test metrics per epoch."""
        # Setting administrative parameters
        self._reset_history()
        t0 = time.time()
        # Setting general parameters
        N_samples, n_feats = X_train.shape
        # Initialize model parameters
        self.model = self.model.weights_initialization(n_feats)
        # Setting loop epochs
        for i in range(self.n_epochs):
            # Shuffling data
            data, labels = permut_data(X_train, y_train)
            # Loop over batches
            for x_batch, y_batch in self._batch_generator(data, labels):
                # Batch prediction
                y_batch_pred = self.predict(x_batch)
                # Update model
                self.update_model(x_batch, y_batch, y_batch_pred)
            # Tracking loss
            self.compute_epoch_measures(X_train, y_train, X_test, y_test)
            # Early stop when the loss improvement becomes negligible
            if self.change_loss < self.stop_step:
                break
        # BUGFIX: this was `self.fit_times`, so `self.fit_time` (the attribute
        # Solver.report_results prints) stayed at 0.
        self.fit_time = time.time() - t0
        return self

    def update_model(self, x_batch, y_batch, y_batch_pred):
        """One gradient step: loss gradient + regularization gradient."""
        # Compute gradient
        gradloss_w, gradloss_w0 =\
            self.lossf.gradient_loss(y_batch_pred, y_batch, x_batch)
        gradloss_w, gradloss_w0 =\
            self._add_gradient_regularization(gradloss_w, gradloss_w0)
        # Parameters update
        new_w = self.model.w - self.learning_rate*gradloss_w
        new_w0 = self.model.w0 - self.learning_rate*gradloss_w0
        self.model = self.model.reinstantiate(new_w, new_w0)

    def compute_epoch_measures(self, X_train, y_train, X_test, y_test):
        """Record train (and optionally test) loss/accuracy for this epoch."""
        ## Train
        losses_epoch, train_acc = self._compute_measures(X_train, y_train)
        # Change loss
        self.change_loss = self.last_loss-losses_epoch
        self.last_loss = losses_epoch
        ## Test (only when both test arrays were supplied)
        testing_phase = (X_test is not None) and (y_test is not None)
        if testing_phase:
            loss_test, acc_test = self._compute_measures(X_test, y_test)
        else:
            loss_test, acc_test = None, None
        # Add to history
        self._add_epoch_to_history(losses_epoch, train_acc,
                                   loss_test, acc_test)
        # Add new epoch to the counter
        self.epoch_learned += 1

    def _compute_measures(self, X, y):
        """Regularized loss and sign-accuracy on (X, y)."""
        y_p = self.predict(X)
        lossf_term = self.lossf.loss(y_p, y)
        loss = self._add_regularization_loss(lossf_term)
        # Accuracy on the signs of the raw predictions
        acc = accuracy(np.sign(y_p), y)
        return loss, acc

    def _batch_generator(self, data, labels):
        """Yield sequential (x_batch, y_batch) slices; override to change
        the batch sampling strategy."""
        N_samples = len(data)
        for init, endit in batch_size_iter(N_samples, self.batch_size):
            x_batch = data[init:endit]
            y_batch = labels[init:endit]
            yield x_batch, y_batch

    def _add_regularization_loss(self, loss):
        """Add the (halved) regularization penalty to the loss."""
        reg_term = 0.5*(self.regularizer.regularize(*self.model.parameters))
        loss += reg_term
        return loss

    def _add_gradient_regularization(self, gradloss_w, gradloss_w0):
        """Add the regularizer's gradient contributions to both gradients."""
        grad_w_reg, grad_w0_reg =\
            self.regularizer.gradient_regularization(*self.model.parameters)
        gradloss_w += grad_w_reg
        gradloss_w0 += grad_w0_reg
        return gradloss_w, gradloss_w0
class SVM(SGD):
    """Support Vector Machine: the SGD solver preconfigured with hinge loss
    and L2 regularization."""
    def __init__(self, loss='Hinge', loss_pars=1.0, regularizer='l2',
                 reg_pars=1., batch_size=10, n_epochs=0,
                 learning_rate=0.0001, stop_step=.000001,
                 verbose=False, history=True):
        super(SVM, self).__init__(loss=loss, loss_pars=loss_pars,
                                  regularizer=regularizer, reg_pars=reg_pars,
                                  batch_size=batch_size, n_epochs=n_epochs,
                                  learning_rate=learning_rate,
                                  stop_step=stop_step, verbose=verbose,
                                  history=history)
def accuracy(y_pred, y_true):
    """Fraction of predictions that equal the ground-truth labels."""
    matches = (y_true == y_pred)
    return np.sum(matches) / float(y_true.shape[0])
def batch_size_iter(data_size, batch_size):
    """Yield (start, end) index pairs covering [0, data_size) in chunks of
    batch_size; the last chunk is clipped to data_size. An empty data set
    yields a single (0, 0) pair."""
    start = 0
    while True:
        end = start + batch_size
        if end >= data_size:
            # Final (possibly short) batch.
            yield start, data_size
            return
        yield start, end
        start = end
def permut_data(data, labels=None):
    """Return a randomly permuted copy of `data` (and of `labels`, shuffled
    in lockstep, when given)."""
    order = np.random.permutation(len(data))
    if labels is None:
        return data[order]
    return data[order], labels[order]
#def stop_condition(loss_change, i, N, stop_step):
# if N != 0:
# if i >= N:
# return False
# if loss_change < stop_step:
# return False
# return True
################################### Model #####################################
###############################################################################
class Model(object):
    """Factory base class for prediction models."""
    @classmethod
    def create_model(cls, model, *parameters):
        """Instantiate a model from a string name, an existing instance
        (returned as-is) or a model class."""
        models = {'svm': LinearModel}
        if type(model) == str:
            return models[model.lower()](*parameters)
        if isinstance(model, Model):
            return model
        else:
            return model(*parameters)

    @classmethod
    def reinstantiate(cls, *args):
        """Build a fresh instance of this class from new parameters."""
        return cls(*args)
class LinearModel(Model):
    """Linear predictor f(X) = X.w + w0."""

    def __init__(self, w, w0=0.):
        # Fall back to a trivial single-weight model when no weights given.
        if w is None:
            self.w, self.w0 = np.array([1.]), 0.
        else:
            self.w, self.w0 = w, w0

    @classmethod
    def weights_initialization(cls, n_feats, init_type='gauss'):
        """Build a model with all-zero or Gaussian-random initial weights."""
        if init_type == 'zeros':
            w, w0 = np.zeros(n_feats), 0.
        elif init_type == 'gauss':
            # Weight vector drawn first, then the bias (RNG order preserved).
            w, w0 = np.random.randn(n_feats), np.random.randn()
        return cls(w, w0)

    @property
    def parameters(self):
        """(w, w0) tuple of the current parameters."""
        return self.w, self.w0

    def compute(self, X):
        """Evaluate the linear function on X."""
        return np.dot(X, self.w) + self.w0

    def reset_model(self):
        """Drop the learned weights."""
        self.w = None
        self.w0 = None
################################ Loss function ################################
###############################################################################
class LossFunction(object):
    """General object for loss functions."""
    def __init__(self):
        # Subclasses must implement both the loss value and its gradient.
        required_functions = ['loss', 'gradient_loss']
        for req in required_functions:
            assert(req in dir(self))

    @classmethod
    def create_lossfunction(cls, loss, *parameters):
        """Instantiate a loss from a string name, an existing instance
        (returned as-is) or a loss class."""
        ## Setting loss
        loss_functions = {'hinge': Hinge}
        if type(loss) == str:
            return loss_functions[loss.lower()](*parameters)
        if isinstance(loss, LossFunction):
            return loss
        else:
            return loss(*parameters)
class Hinge(LossFunction):
    """Hinge loss for binary linear classifiers with targets in {-1, 1}.

    ``loss`` returns the mean per-sample hinge penalty; ``gradient_loss``
    returns the (sub)gradients summed over the batch, not averaged.
    """
    def __init__(self, threshold=1.0):
        # Margin below which a sample is penalized.
        self.threshold = threshold
        super(Hinge, self).__init__()

    def loss(self, y_pred, y_true):
        """Mean hinge penalty over the batch."""
        margins = y_pred * y_true
        penalties = np.where(margins <= self.threshold,
                             self.threshold - margins, 0.0)
        return np.mean(penalties)

    def gradient_loss(self, y_pred, y, x):
        """dL/dw and dL/dw0 (returned separately), summed over the batch."""
        violators = ((y_pred * y) <= self.threshold).astype(float)
        grad_w = -np.dot(y * violators, x)
        grad_w0 = -np.sum(y * violators)
        return grad_w, grad_w0
################################ Regularization ###############################
###############################################################################
class Regularization(object):
    """Factory base class for weight regularizers."""
    @classmethod
    def create_regularization(cls, regularizer, *parameters):
        """Instantiate a regularizer from a string name, an existing instance
        (returned as-is) or a regularizer class.

        NOTE: only 'null' and 'l2' are registered here; L1_Regularization
        must be passed as a class or instance.
        """
        regularizations =\
            {'null': Null_Regularization, 'l2': L2_Regularization}
        if type(regularizer) == str:
            return regularizations[regularizer.lower()](*parameters)
        if isinstance(regularizer, Regularization):
            return regularizer
        else:
            return regularizer(*parameters)
class Null_Regularization(Regularization):
    """No-op regularization: zero penalty and zero gradients."""
    def __init__(self):
        pass

    @property
    def parameters(self):
        # No hyperparameters.
        return None

    def regularize(self, *args):
        """Penalty value (always 0)."""
        return 0.

    def gradient_regularization(self, *args):
        """Gradient contributions for (w, w0).

        BUGFIX: this returned a single scalar 0., but the caller
        (SGD._add_gradient_regularization) unpacks two values, which raised
        TypeError whenever the null regularizer was used.
        """
        return 0., 0.
class L1_Regularization(Regularization):
    """L1 (lasso) penalty: 0.5 * lambda * (sum(|w|) + |w0|).

    The 0.5 factor mirrors L2_Regularization so the two are interchangeable
    in the solver. BUGFIX: the previous implementation computed an L2-style
    norm (sqrt(w.w) + w0) and returned constant gradients, which is not the
    L1 penalty.
    """
    def __init__(self, lambda_par):
        self.lambda_par = lambda_par

    @property
    def parameters(self):
        return self.lambda_par

    def regularize(self, w, w0=0.):
        """Penalty value for weights w and bias w0."""
        return 0.5*(np.sum(np.abs(w)) + np.abs(w0))*self.lambda_par

    def gradient_regularization(self, w, w0=0.):
        """Subgradient of the penalty (sign convention: 0 at 0)."""
        return 0.5*self.lambda_par*np.sign(w), 0.5*self.lambda_par*np.sign(w0)
class L2_Regularization(Regularization):
    """L2 (ridge) penalty: 0.5 * lambda * (||w||^2 + w0^2)."""
    def __init__(self, lambda_par):
        self.lambda_par = lambda_par

    @property
    def parameters(self):
        return self.lambda_par

    def regularize(self, w, w0=0.):
        # Penalty value for weights w and bias w0.
        return 0.5*(np.dot(w, w)+w0**2)*self.lambda_par

    def gradient_regularization(self, w, w0=0.):
        # d/dw of the penalty: lambda*w (and lambda*w0 for the bias).
        return self.lambda_par*w, self.lambda_par*w0
| [
"tgq.spm@gmail.com"
] | tgq.spm@gmail.com |
d85c622fbd7c8ecae479ca39479dd1212ab795cc | 673e829dda9583c8dd2ac8d958ba1dc304bffeaf | /data/multilingual/Latn.ROH/Sans_16/pdf_to_json_test_Latn.ROH_Sans_16.py | e170b801401e0141c2c13650a2c4b158f8c1a120 | [
"BSD-3-Clause"
] | permissive | antoinecarme/pdf_to_json_tests | 58bab9f6ba263531e69f793233ddc4d33b783b7e | d57a024fde862e698d916a1178f285883d7a3b2f | refs/heads/master | 2021-01-26T08:41:47.327804 | 2020-02-27T15:54:48 | 2020-02-27T15:54:48 | 243,359,934 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 303 | py | import pdf_to_json as p2j
import json
url = "file:data/multilingual/Latn.ROH/Sans_16/udhr_Latn.ROH_Sans_16.pdf"
# Convert the sample PDF and dump the result as pretty-printed JSON.
lConverter = p2j.pdf_to_json.pdf_to_json_converter()
lConverter.mImageHashOnly = True  # keep image hashes only, not pixel data
lDict = lConverter.convert(url)
print(json.dumps(lDict, indent=4, ensure_ascii=False, sort_keys=True))
| [
"antoine.carme@laposte.net"
] | antoine.carme@laposte.net |
2ac1d285ba6c3768c202afcc8a5a6a7e0e1c731d | 350db570521d3fc43f07df645addb9d6e648c17e | /1480_Running_Sum_of_1d_Array/solution_test.py | aa3df5123a9a3b0d950bbc9f0bba6ab00a0c8c5d | [] | no_license | benjaminhuanghuang/ben-leetcode | 2efcc9185459a1dd881c6e2ded96c42c5715560a | a2cd0dc5e098080df87c4fb57d16877d21ca47a3 | refs/heads/master | 2022-12-10T02:30:06.744566 | 2022-11-27T04:06:52 | 2022-11-27T04:06:52 | 236,252,145 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 379 | py |
'''
1480. Running Sum of 1d Array
Level: Easy
https://leetcode.com/problems/running-sum-of-1d-array
'''
import unittest
class TestSum(unittest.TestCase):
    """Smoke tests for the builtin sum() (template sanity check)."""

    def test_sum(self):
        self.assertEqual(sum([1, 2, 3]), 6, "Should be 6")

    def test_sum_tuple(self):
        # BUGFIX: the fixture was (1, 2, 2), which sums to 5 and made this
        # test fail unconditionally.
        self.assertEqual(sum((1, 2, 3)), 6, "Should be 6")
if __name__ == '__main__':
unittest.main() | [
"bhuang@rms.com"
] | bhuang@rms.com |
b4ed3f8fcd2bd524cc2d83d695bf06fa722eca15 | 63782b2e7bc03274f918868cf2f3bc221a0f32a1 | /hj57.py | c672183778d1569782cb48555108fc6a396a3e97 | [] | no_license | imsilence/huawei_nowcoder | a111aea3b99e30029a5c954a1e329d88817747aa | 30ca82d05340155e918b13783a32cf1802dd3e3b | refs/heads/master | 2022-12-27T05:57:52.097157 | 2020-10-12T01:54:22 | 2020-10-12T01:54:22 | 302,632,596 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,687 | py | #encoding: utf-8
'''
题目描述
在计算机中,由于处理器位宽限制,只能处理有限精度的十进制整数加减法,比如在32位宽处理器计算机中,
参与运算的操作数和结果必须在-2^31~2^31-1之间。如果需要进行更大范围的十进制整数加法,需要使用特殊
的方式实现,比如使用字符串保存操作数和结果,采取逐位运算的方式。如下:
9876543210 + 1234567890 = ?
让字符串 num1="9876543210",字符串 num2="1234567890",结果保存在字符串 result = "11111111100"。
-9876543210 + (-1234567890) = ?
让字符串 num1="-9876543210",字符串 num2="-1234567890",结果保存在字符串 result = "-11111111100"。
要求编程实现上述高精度的十进制加法。
要求实现方法:
public String add (String num1, String num2)
【输入】num1:字符串形式操作数1,如果操作数为负,则num1的前缀为符号位'-'
num2:字符串形式操作数2,如果操作数为负,则num2的前缀为符号位'-'
【返回】保存加法计算结果字符串,如果结果为负,则字符串的前缀为'-'
注:
(1)当输入为正数时,'+'不会出现在输入字符串中;当输入为负数时,'-'会出现在输入字符串中,且一定在输入字符串最左边位置;
(2)输入字符串所有位均代表有效数字,即不存在由'0'开始的输入字符串,比如"0012", "-0012"不会出现;
(3)要求输出字符串所有位均为有效数字,结果为正或0时'+'不出现在输出字符串,结果为负时输出字符串最左边位置为'-'。
输入描述:
输入两个字符串
输出描述:
输出给求和后的结果
示例1
输入
9876543210
1234567890
输出
11111111100
'''
def add(left, right):
    """Add two non-negative decimal digit strings of arbitrary length and
    return the sum as a digit string."""
    width = max(len(left), len(right))
    # Pad both operands so the digit columns line up.
    a = left.rjust(width, '0')
    b = right.rjust(width, '0')
    digits = []
    carry = 0
    for da, db in zip(reversed(a), reversed(b)):
        carry, digit = divmod(int(da) + int(db) + carry, 10)
        digits.append(str(digit))
    if carry:
        digits.append('1')
    return ''.join(reversed(digits))
def sub(flag, left, right):
    """Digit-wise subtraction of two non-negative digit strings, assuming
    the magnitude of `left` is >= that of `right` (callers guarantee this).

    # Args
        flag : truthy when the result is negative (adds a '-' prefix)
    # Returns
        decimal string with no leading zeros ('0' for a zero difference)
    """
    digits = []

    def digit_at(txt, i):
        # Positions outside the string count as zeros.
        if i < 0:
            return 0
        return int('0' if i >= len(txt) else txt[i])

    borrow = 0
    for i in range(max(len(left), len(right))):
        value = digit_at(left, len(left) - i - 1) - digit_at(right, len(right) - i - 1) - borrow
        if value < 0:
            borrow = 1
            value += 10
        else:
            borrow = 0
        digits.append(str(value))
    # BUGFIX: strip leading zeros ("100" - "1" must be "99", not "099"),
    # as required by the problem statement.
    magnitude = ''.join(reversed(digits)).lstrip('0') or '0'
    return ('-' + magnitude) if flag else magnitude
def solution():
    """Read two (possibly '-'-prefixed) big-integer strings from stdin and
    return their sum as a string.

    Signs are stripped first; equal signs add the magnitudes, mixed signs
    subtract the smaller magnitude from the larger one, keeping the sign
    of the larger operand.
    """
    left = input().strip()
    right = input().strip()
    leftFlag = 0
    if left[0] == '-':
        leftFlag = 1
        left = left[1:]
    rightFlag = 0
    if right[0] == '-':
        rightFlag = 1
        right = right[1:]
    if rightFlag + leftFlag == 0:
        # both non-negative: |a| + |b|
        return add(left, right)
    elif rightFlag + leftFlag == 2:
        # both negative: -(|a| + |b|)
        return "-" + add(left, right)
    elif len(left) == len(right):
        # Mixed signs, equal digit count: lexicographic comparison equals
        # numeric comparison for equal-length digit strings.
        if left == right:
            return '0'
        elif left > right:
            return sub(leftFlag, left, right)
        else:
            return sub(rightFlag, right, left)
    elif len(left) > len(right):
        # Mixed signs: subtract the smaller magnitude from the larger.
        return sub(leftFlag, left, right)
    else:
        return sub(rightFlag, right, left)
if __name__ == '__main__':
    # Process input pairs until exhausted; the exception raised by input()
    # at EOF is printed before the loop exits.
    while True:
        try:
            print(solution())
        except Exception as e:
            print(e)
break | [
"imsilence@outlook.com"
] | imsilence@outlook.com |
523c535969ca9eb444fb4f8250bf8bd22bc01f0d | ba9cb3bbc46faeea1edc01ef7e18131ae2dbf923 | /problem-030.py | e03c5f5788b3724615b117587ada3195a2cd5e6a | [] | no_license | beautytiger/project-euler | fb9908d35a82cd4b912a541282842adca03b17e2 | a8de90a2e5b98660505169afd9c8c27b1b3af28e | refs/heads/master | 2021-06-19T00:40:17.331130 | 2017-05-31T11:04:05 | 2017-05-31T11:04:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,112 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from tools.runningTime import runTime
from tools.common import DigitsPowerSum as DPS
# below function will give you a hint for the uplimit of this problem
@runTime
def numberStep(limit=9**5*6):
    """Print [n1, v1, n2, v2, ...]: the numbers where the digit fifth-power
    sum reaches a new maximum, to eyeball an upper bound for the search."""
    step = [1, 1]
    for i in xrange(2, limit):
        s = DPS(i, 5)
        if s > step[-1]:
            step.extend([i, s])
    print step
# below function will print numbers that its DPS bigger than 9999 and in [10000, 89999).
# output: [19999, 29999, 39999, 49999, 59999, 69999, 79999, 88999, 89899, 89989, 89998]
@runTime
def oneMoreDigit():
    """Print the 5-digit numbers whose digit fifth-power sum exceeds that of
    9999; shows how adding a digit shifts the achievable power sums."""
    target = []
    st = DPS(9999, 5)
    for i in range(10000, 89999):
        if DPS(i, 5)> st:
            target.append(i)
    print target
# the key is to find the bounds of this list.
@runTime
def bruteForce(exp=5):
    """Print the sum of all numbers > 10 that equal the sum of the exp-th
    powers of their digits (Project Euler problem 30)."""
    # one obvious up-edge is 9**5*6 = 354294
    # one edge is 9**exp*(exp-1) which is more efficient. 9**5*4 = 236196
    print "Result: {}".format(sum([n for n in xrange(11, 9**exp*(exp-1)) if DPS(n, exp)==n]))
if __name__ == "__main__":
# numberStep()
# oneMoreDigit()
bruteForce()
| [
"konmyn@163.com"
] | konmyn@163.com |
3bab6516ea13afd54cb7cbbcf9c233a58a308fd1 | dacb257a90310eba03f3128221120a7d54b894ba | /dev/unix/rf433switch.py | bdcb7ab9d8602202d7d3198ff4192c99297560d2 | [
"MIT"
] | permissive | SiChiTong/pysmartnode | 92351efa02e52aa84185a53896957c453b12540a | a0998ad6582a28fe5a0529fb15dd4f61e254d25f | refs/heads/master | 2023-01-05T10:00:14.907988 | 2020-09-01T10:07:45 | 2020-09-01T10:07:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,244 | py | # Author: Kevin Köck
# Copyright Kevin Köck 2019-2020 Released under the MIT license
# Created on 2019-07-03
"""
example config:
{
package: .unix.rf433switch
component: RF433
constructor_args: {
unit_code: "10001"
unit: "1"
# expected_execution_time_on: 500 # optional, estimated execution time; allows other coroutines to run during that time
# expected_execution_time_off: 500 # optional, estimated execution time; allows other coroutines to run during that time
# iterations: 1 # optional, number of times the command will be executed
# iter_delay: 20 # optional, delay in ms between iterations
# mqtt_topic: null #optional, defaults to <mqtt_home>/<device_id>/RF433<count>/set
# friendly_name: null # optional, friendly name shown in homeassistant gui with mqtt discovery
}
}
"""
__updated__ = "2019-09-29"
__version__ = "0.4"
import gc
from pysmartnode import config
from pysmartnode import logging
from pysmartnode.utils.component import Switch, DISCOVERY_SWITCH
from .popen_base import Popen
####################
COMPONENT_NAME = "RF433Switch"
# define the type of the component according to the homeassistant specifications
_COMPONENT_TYPE = "switch"
####################
_mqtt = config.getMQTT()
_log = logging.getLogger(COMPONENT_NAME)
gc.collect()
_unit_index = -1
COMMAND_ON = "~/raspberry-remote/send {!s} {!s} 1"
COMMAND_OFF = "~/raspberry-remote/send {!s} {!s} 0"
EXPECTED_RETURN_ON = 'using pin 0\nsending systemCode[{!s}] unitCode[{!s}] command[1]\n'
EXPECTED_RETURN_OFF = 'using pin 0\nsending systemCode[{!s}] unitCode[{!s}] command[0]\n'
class RF433(Switch):
lock = config.Lock() # only one method can have control over the RF433 device
def __init__(self, unit_code, unit, expected_execution_time_on=500, expected_execution_time_off=500,
iterations=1, iter_delay=10, mqtt_topic=None, friendly_name=None):
super().__init__()
self._log = _log
# This makes it possible to use multiple instances of Switch
global _unit_index
self._count = _count
_unit_index += 1
self._topic = mqtt_topic or _mqtt.getDeviceTopic("{!s}{!s}".format(COMPONENT_NAME, self._count),
is_request=True)
self._subscribe(self._topic, self.on_message)
self._frn = friendly_name
gc.collect()
self.unit_lock = config.Lock()
self._c_on = Popen(COMMAND_ON.format(unit_code, unit), EXPECTED_RETURN_ON.format(unit_code, unit),
expected_execution_time_on, iterations, iter_delay)
self._c_off = Popen(COMMAND_OFF.format(unit_code, unit), EXPECTED_RETURN_OFF.format(unit_code, unit),
expected_execution_time_off, iterations, iter_delay)
async def on_message(self, topic, msg, retain):
if self.unit_lock.locked():
return False
async with self.lock:
async with self.unit_lock:
if msg in _mqtt.payload_on:
r = await self._c_on.execute()
if r is True:
await _mqtt.publish(self._topic[:-4], "ON", qos=1, retain=True) # makes it easier to subclass
return True
else:
await self._log.asyncLog("warn", "Got unexpected return: {!s}".format(r))
return False
elif msg in _mqtt.payload_off:
r = await self._c_off.execute()
if r is True:
await _mqtt.publish(self._topic[:-4], "OFF", qos=1, retain=True)
return True
else:
await self._log.asyncLog("warn", "Got unexpected return: {!s}".format(r))
return False
async def _discovery(self):
name = "{!s}{!s}".format(COMPONENT_NAME, self._count)
await self._publishDiscovery(_COMPONENT_TYPE, self._topic[:-4], name, DISCOVERY_SWITCH, self._frn)
# note that _publishDiscovery does expect the state topic but we have the command topic stored.
| [
"kevinkk525@users.noreply.github.com"
] | kevinkk525@users.noreply.github.com |
73bb93dc2fdee7465546f4c3704f0c99b53a1c65 | 8d2a785ffc06ec46a546cdf50af41054a382f05a | /classes/day15/testideaP/testideaP/urls.py | 53105cc9ee90854ad8f7cf1033d571a40ae3d3d0 | [] | no_license | Pigcanflysohigh/Py27 | 4be0d9ad93f5d695c48fd89157952230ec4d111a | 2f6568fce2a6f09c73cdc08342a8b05645c87736 | refs/heads/master | 2020-06-18T08:09:08.217036 | 2019-11-20T16:14:05 | 2019-11-20T16:14:05 | 196,225,940 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 766 | py | """testideaP URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
| [
"729167925@qq.com"
] | 729167925@qq.com |
14ae5a589fdaf24861ddaa9f4769af9675d93a7c | fcf735d736c15cb825f2d6f64f6098a5c4b9e851 | /2016/19.py | 59635417c98280d70020d03853065238c989530a | [] | no_license | csudcy/adventofcode | de3bc7078cd07bc4a8e0c5fff065ede78dc280f1 | a68df4f53f35219351cd5d91fc945c4764112b70 | refs/heads/master | 2020-06-10T13:27:41.389206 | 2016-12-26T09:35:29 | 2016-12-26T09:35:29 | 75,956,067 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,349 | py | NEXT_ELF_INDEX, PRESENT_COUNT = range(2)
def generate_elves(count):
# Return a list of (next_elf_index, present_count)
return [
((i+1) % count, 1)
for i in xrange(count)
]
assert generate_elves(1) == [(0, 1)]
assert generate_elves(3) == [(1, 1), (2, 1), (0, 1)]
def move_presents_once(elves):
elf_count = len(elves)
elf_index = 0
while True:
# Skip elves with no presents
while elves[elf_index][PRESENT_COUNT] == 0:
elf_index = (elf_index + 1) % elf_count
next_elf_index, present_count = elves[elf_index]
# Move to the next next_elf_index with presents
while elves[next_elf_index][PRESENT_COUNT] == 0:
next_elf_index = (next_elf_index + 1) % elf_count
# If I am the only elf with presents left, we're done
total_present_count = present_count + elves[next_elf_index][PRESENT_COUNT]
if total_present_count == elf_count:
return elf_index + 1
# Update elf status
elves[elf_index] = ((next_elf_index+1) % elf_count, total_present_count)
elves[next_elf_index] = (-1, 0)
# Move to the next elf which might have presents
elf_index = (next_elf_index + 1) % elf_count
assert move_presents_once(generate_elves(5)) == 3
print move_presents_once(generate_elves(3017957))
| [
"csudcy@gmail.com"
] | csudcy@gmail.com |
b1864609f51d8bc2bc7cc5aff5a1fa5821989503 | 70f564990215f47b139a777826f211477e9b44f6 | /plan2vec_experiments/baselines/vae_greedy_streetlearn.py | 2b901d932170cea3865e03aaf622f61544a12dae | [] | no_license | geyang/plan2vec | de87f2d77732c4aacdefd00067ebebacb7cd763f | aeeb50aed3d7da4c266b4ca163e96d4c0747e3c1 | refs/heads/master | 2022-11-16T03:40:42.638239 | 2022-10-28T04:01:29 | 2022-10-28T04:01:29 | 261,273,420 | 65 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,951 | py | from plan2vec.plan2vec.plan2vec_streetlearn_2 import main, Args
if __name__ == "__main__":
import jaynes
from plan2vec_experiments import instr, config_charts
Args.load_local_metric = "same as before"
Args.load_global_metric = "same as local metric"
jaynes.config("vector-gpu")
Args.visualization_interval = False
local_metric_exp_path = "/geyang/plan2vec/2019/07-31/streetlearn/local_metric/manhattan-xl/LocalMetricConvDeep/lr-(3e-05)/00.37/13.921429"
for env_id, epx_path in {
"manhattan-tiny":
# "/geyang/plan2vec/2019/07-30/vae/streetlearn_vae/dim-(3)/manhattan-tiny/lr-(0.003)/23.00/08.673423"
"/geyang/plan2vec/2019/07-30/vae/streetlearn_vae-incomplete/dim-(2)/manhattan-tiny/lr-(0.0003)/22.59/48.194736",
"manhattan-small":
# "/geyang/plan2vec/2019/07-30/vae/streetlearn_vae/dim-(3)/manhattan-small/lr-(0.003)/23.00/14.916093",
"/geyang/plan2vec/2019/07-30/vae/streetlearn_vae-incomplete/dim-(2)/manhattan-small/lr-(3e-05)/22.59/51.306878",
"manhattan-medium": "",
# "manhattan-large": ""
}.items():
_ = instr(main,
__postfix=f"{env_id}",
lr=0,
env_id=env_id,
data_path=f"~/fair/streetlearn/processed-data/{env_id}",
plan_steps=1,
neighbor_r=0.9,
evaluate=True, term_r=1.2e-4 * float(2),
num_epochs=200,
visualization_interval=False,
global_metric="LocalMetricConvDeep",
latent_dim=32, # use the same as local metric
load_local_metric=f"{local_metric_exp_path}/models/local_metric_400.pkl",
load_global_metric=f"{local_metric_exp_path}/models/local_metric_400.pkl")
config_charts(path="streetlearn.charts.yml")
jaynes.run(_)
jaynes.listen()
jaynes.listen()
| [
"yangge1987@gmail.com"
] | yangge1987@gmail.com |
f871318e4766122e4899de6ae3d92fe781ce1286 | 9b32771b7d1513ee37bc62dd347675abcfc1bfc9 | /example_snippets/multimenus_snippets/Snippets/SymPy/Calculus/Finite differences.py | 585ccb52399464fbbb07a16d419bd3d5517474f6 | [
"BSD-3-Clause"
] | permissive | listar0810/jupyterlab-snippets-multimenus | 44087ef1aeb030a3074862a337508b57d50072c6 | 477f51cfdbad7409eab45abe53cf774cd70f380c | refs/heads/master | 2022-12-12T18:19:25.221083 | 2020-09-08T01:11:01 | 2020-09-08T01:11:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 88 | py | dx0, dx1 = symbols("dx0, dx1")
formula = as_finite_diff(f(x).diff(x), [x-dx0, x, x+dx1]) | [
"kptan86@gmail.com"
] | kptan86@gmail.com |
601ff37229751271187e38404475b8cd160483ea | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_1673.py | 8aa1924c9fc02ad862fc48164c8a4e2f33468bbf | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 40 | py | # python sqlite check points?
SAVEPOINT
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
5b8e1c159431a579d9e523ef91e56eea4d9be509 | 6d0ca19b8c0f986954135bca68fd3abc558e8ab8 | /PKUTreeMaker/test/Wcrab/crab3_analysisWA.py | cba1ce5ca67f58e076e88a590559cf3217ee9e62 | [] | no_license | AnYpku/Ntuple | 8e018a2980d0440bf48c918a328d75e406df9595 | 7e3a41a7da5ef0005be67e32d615752ca6f130e1 | refs/heads/master | 2020-03-22T14:24:27.658961 | 2018-04-06T12:26:08 | 2018-04-06T12:26:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,155 | py | from WMCore.Configuration import Configuration
config = Configuration()
config.section_("General")
config.General.requestName = 'WA-1'
config.General.transferLogs = True
config.section_("JobType")
config.JobType.pluginName = 'Analysis'
config.JobType.inputFiles = ['Summer16_23Sep2016V3_MC_L1FastJet_AK4PFchs.txt','Summer16_23Sep2016V3_MC_L2Relative_AK4PFchs.txt','Summer16_23Sep2016V3_MC_L3Absolute_AK4PFchs.txt','Summer16_23Sep2016V3_MC_L1FastJet_AK4PFpuppi.txt','Summer16_23Sep2016V3_MC_L2Relative_AK4PFpuppi.txt','Summer16_23Sep2016V3_MC_L3Absolute_AK4PFpuppi.txt']
# Name of the CMSSW configuration file
config.JobType.psetName = 'analysis.py'
config.JobType.allowUndistributedCMSSW = True
config.section_("Data")
config.Data.inputDataset = '/WGToLNuG_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISummer16MiniAODv2-PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/MINIAODSIM'
config.Data.inputDBS = 'global'
config.Data.splitting = 'FileBased'
config.Data.unitsPerJob = 1
config.Data.totalUnits = -1
config.Data.publication = False
config.Data.outputDatasetTag = 'WA-1'
config.section_("Site")
config.Site.storageSite = 'T2_CH_CERN'
| [
"1501110102@pku.edu.cn"
] | 1501110102@pku.edu.cn |
35349035fa9deec5305c020cefe758b7875d2922 | 8defed57154f96a27fe368e7fcdc8e58d4ff8b53 | /django_celery/api/tasks.py | 2a39390d3755412eaf1cadeda9bd6d30405d6548 | [] | no_license | defnngj/test_dev4 | 227c5bb53d96adb4cd4583b33c232cfcb4d85874 | 126f11eba933bf06bea4c3ef5e03024675549633 | refs/heads/main | 2023-02-21T17:54:55.098914 | 2021-01-24T10:02:14 | 2021-01-24T10:02:14 | 305,059,110 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,305 | py | import os
from time import sleep
from celery import shared_task
import requests
import unittest
import xmlrunner
from api.test_case.test_interface import RunConfig
API_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEST_CASE = os.path.join(API_DIR, "api", "test_case")
TEST_REPORT = os.path.join(API_DIR, "api", "reports")
print(TEST_CASE)
print(TEST_REPORT)
# @shared_task
# def add(x, y):
# sleep(10)
# return x + y
#
#
# @shared_task
# def mul(x, y):
# return x * y
#
#
# @shared_task
# def xsum(numbers):
# return sum(numbers)
#
# @shared_task
# def test():
# sleep(10)
@shared_task
def running_task(taskID):
RunConfig.task = taskID
print("task id-->", RunConfig.task)
suit = unittest.defaultTestLoader.discover(TEST_CASE)
report = os.path.join(TEST_REPORT, "002.xml")
print("--->", report)
with open(report, 'wb') as output:
xmlrunner.XMLTestRunner(output=output).run(suit)
# 把 xml 报告里面的内容写 测试结果表里面
# r = requests.get('https://api.github.com/events')
# assert r.status_code == 200
#
# suit = unittest.defaultTestLoader.discover(TEST_CASE)
# with open(TEST_REPORT + '/001.xml', 'wb') as output:
# xmlrunner.XMLTestRunner(output=output).run(suit)
# print(suit)
| [
"defnngj@gmail.com"
] | defnngj@gmail.com |
824c7924813e055a9e4abde4e2cc36032b7d9693 | 9ee47ef87f0c9a0bef750f6c380b8c1599a63b69 | /sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2018_02_01/models/_models.py | 7849442cf6b60f1f8249b82d060678dedb7d0f78 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | qinxgit/azure-sdk-for-python | 1ccfd8dc1bccdce5ff451742b9c50ee87a41f918 | 32f3f0c82fe3bac23e968b9393a479ea0e132144 | refs/heads/master | 2021-07-09T16:59:29.610494 | 2020-11-04T22:52:19 | 2020-11-04T22:52:19 | 213,697,974 | 0 | 0 | MIT | 2020-11-04T22:52:20 | 2019-10-08T16:40:46 | null | UTF-8 | Python | false | false | 510,203 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
from msrest.exceptions import HttpOperationError
class AbnormalTimePeriod(Model):
    """An abnormal time period identified during diagnosis.

    :param start_time: Start time of the downtime
    :type start_time: datetime
    :param end_time: End time of the downtime
    :type end_time: datetime
    :param events: List of possible causes of the downtime
    :type events: list[~azure.mgmt.web.models.DetectorAbnormalTimePeriod]
    :param solutions: List of proposed solutions
    :type solutions: list[~azure.mgmt.web.models.Solution]
    """

    _attribute_map = {
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'events': {'key': 'events', 'type': '[DetectorAbnormalTimePeriod]'},
        'solutions': {'key': 'solutions', 'type': '[Solution]'},
    }

    def __init__(self, **kwargs):
        super(AbnormalTimePeriod, self).__init__(**kwargs)
        # All fields are optional; absent keys default to None.
        self.start_time = kwargs.get('start_time')
        self.end_time = kwargs.get('end_time')
        self.events = kwargs.get('events')
        self.solutions = kwargs.get('solutions')
class Address(Model):
    """Contact address used for domain registration.

    All required parameters must be populated in order to send to Azure.

    :param address1: Required. First line of an Address.
    :type address1: str
    :param address2: The second line of the Address. Optional.
    :type address2: str
    :param city: Required. The city for the address.
    :type city: str
    :param country: Required. The country for the address.
    :type country: str
    :param postal_code: Required. The postal code for the address.
    :type postal_code: str
    :param state: Required. The state or province for the address.
    :type state: str
    """

    # Everything except the second address line is mandatory.
    _validation = {
        'address1': {'required': True},
        'city': {'required': True},
        'country': {'required': True},
        'postal_code': {'required': True},
        'state': {'required': True},
    }

    _attribute_map = {
        'address1': {'key': 'address1', 'type': 'str'},
        'address2': {'key': 'address2', 'type': 'str'},
        'city': {'key': 'city', 'type': 'str'},
        'country': {'key': 'country', 'type': 'str'},
        'postal_code': {'key': 'postalCode', 'type': 'str'},
        'state': {'key': 'state', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(Address, self).__init__(**kwargs)
        self.address1 = kwargs.get('address1')
        self.address2 = kwargs.get('address2')
        self.city = kwargs.get('city')
        self.country = kwargs.get('country')
        self.postal_code = kwargs.get('postal_code')
        self.state = kwargs.get('state')
class AddressResponse(Model):
    """Main public IP address plus any additional virtual IPs.

    :param service_ip_address: Main public virtual IP.
    :type service_ip_address: str
    :param internal_ip_address: Virtual Network internal IP address of the App
     Service Environment if it is in internal load-balancing mode.
    :type internal_ip_address: str
    :param outbound_ip_addresses: IP addresses appearing on outbound
     connections.
    :type outbound_ip_addresses: list[str]
    :param vip_mappings: Additional virtual IPs.
    :type vip_mappings: list[~azure.mgmt.web.models.VirtualIPMapping]
    """

    _attribute_map = {
        'service_ip_address': {'key': 'serviceIpAddress', 'type': 'str'},
        'internal_ip_address': {'key': 'internalIpAddress', 'type': 'str'},
        'outbound_ip_addresses': {'key': 'outboundIpAddresses', 'type': '[str]'},
        'vip_mappings': {'key': 'vipMappings', 'type': '[VirtualIPMapping]'},
    }

    def __init__(self, **kwargs):
        super(AddressResponse, self).__init__(**kwargs)
        # Every attribute is optional; missing kwargs fall back to None.
        self.service_ip_address = kwargs.get('service_ip_address')
        self.internal_ip_address = kwargs.get('internal_ip_address')
        self.outbound_ip_addresses = kwargs.get('outbound_ip_addresses')
        self.vip_mappings = kwargs.get('vip_mappings')
class AnalysisData(Model):
    """Detector evidence used for an analysis.

    :param source: Name of the Detector
    :type source: str
    :param detector_definition: Detector Definition
    :type detector_definition: ~azure.mgmt.web.models.DetectorDefinition
    :param metrics: Source Metrics
    :type metrics: list[~azure.mgmt.web.models.DiagnosticMetricSet]
    :param data: Additional Source Data
    :type data: list[list[~azure.mgmt.web.models.NameValuePair]]
    :param detector_meta_data: Detector Meta Data
    :type detector_meta_data: ~azure.mgmt.web.models.ResponseMetaData
    """

    _attribute_map = {
        'source': {'key': 'source', 'type': 'str'},
        'detector_definition': {'key': 'detectorDefinition', 'type': 'DetectorDefinition'},
        'metrics': {'key': 'metrics', 'type': '[DiagnosticMetricSet]'},
        'data': {'key': 'data', 'type': '[[NameValuePair]]'},
        'detector_meta_data': {'key': 'detectorMetaData', 'type': 'ResponseMetaData'},
    }

    def __init__(self, **kwargs):
        super(AnalysisData, self).__init__(**kwargs)
        # Plain optional pass-through of the caller-supplied values.
        self.source = kwargs.get('source')
        self.detector_definition = kwargs.get('detector_definition')
        self.metrics = kwargs.get('metrics')
        self.data = kwargs.get('data')
        self.detector_meta_data = kwargs.get('detector_meta_data')
class ProxyOnlyResource(Model):
    """Azure proxy-only resource: not tracked by Azure Resource Manager.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    """

    # Server-populated fields; never sent on requests.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ProxyOnlyResource, self).__init__(**kwargs)
        # Read-only attributes start out empty; only 'kind' is settable.
        self.id = None
        self.name = None
        self.kind = kwargs.get('kind')
        self.type = None
class AnalysisDefinition(ProxyOnlyResource):
    """Definition of an Analysis.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar description: Description of the Analysis
    :vartype description: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'description': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AnalysisDefinition, self).__init__(**kwargs)
        # Server-populated only; cannot be set by the caller.
        self.description = None
class ApiDefinitionInfo(Model):
    """Location of the formal API definition for the app.

    :param url: The URL of the API definition.
    :type url: str
    """

    _attribute_map = {
        'url': {'key': 'url', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ApiDefinitionInfo, self).__init__(**kwargs)
        self.url = kwargs.get('url')
class ApplicationLogsConfig(Model):
    """Configuration of application log destinations.

    :param file_system: Application logs to file system configuration.
    :type file_system: ~azure.mgmt.web.models.FileSystemApplicationLogsConfig
    :param azure_table_storage: Application logs to azure table storage
     configuration.
    :type azure_table_storage:
     ~azure.mgmt.web.models.AzureTableStorageApplicationLogsConfig
    :param azure_blob_storage: Application logs to blob storage configuration.
    :type azure_blob_storage:
     ~azure.mgmt.web.models.AzureBlobStorageApplicationLogsConfig
    """

    _attribute_map = {
        'file_system': {'key': 'fileSystem', 'type': 'FileSystemApplicationLogsConfig'},
        'azure_table_storage': {'key': 'azureTableStorage', 'type': 'AzureTableStorageApplicationLogsConfig'},
        'azure_blob_storage': {'key': 'azureBlobStorage', 'type': 'AzureBlobStorageApplicationLogsConfig'},
    }

    def __init__(self, **kwargs):
        super(ApplicationLogsConfig, self).__init__(**kwargs)
        # Each sink configuration is independent and optional.
        self.file_system = kwargs.get('file_system')
        self.azure_table_storage = kwargs.get('azure_table_storage')
        self.azure_blob_storage = kwargs.get('azure_blob_storage')
class ApplicationStack(Model):
    """Describes an application stack.

    :param name: Application stack name.
    :type name: str
    :param display: Application stack display name.
    :type display: str
    :param dependency: Application stack dependency.
    :type dependency: str
    :param major_versions: List of major versions available.
    :type major_versions: list[~azure.mgmt.web.models.StackMajorVersion]
    :param frameworks: List of frameworks associated with application stack.
    :type frameworks: list[~azure.mgmt.web.models.ApplicationStack]
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display': {'key': 'display', 'type': 'str'},
        'dependency': {'key': 'dependency', 'type': 'str'},
        'major_versions': {'key': 'majorVersions', 'type': '[StackMajorVersion]'},
        'frameworks': {'key': 'frameworks', 'type': '[ApplicationStack]'},
    }

    def __init__(self, **kwargs):
        super(ApplicationStack, self).__init__(**kwargs)
        # Note the recursive 'frameworks' list of nested ApplicationStack.
        self.name = kwargs.get('name')
        self.display = kwargs.get('display')
        self.dependency = kwargs.get('dependency')
        self.major_versions = kwargs.get('major_versions')
        self.frameworks = kwargs.get('frameworks')
class AppServiceCertificate(Model):
    """Key Vault container holding a certificate purchased through Azure.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param key_vault_id: Key Vault resource Id.
    :type key_vault_id: str
    :param key_vault_secret_name: Key Vault secret name.
    :type key_vault_secret_name: str
    :ivar provisioning_state: Status of the Key Vault secret. Possible values
     include: 'Initialized', 'WaitingOnCertificateOrder', 'Succeeded',
     'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault',
     'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist',
     'KeyVaultSecretDoesNotExist', 'UnknownError', 'ExternalPrivateKey',
     'Unknown'
    :vartype provisioning_state: str or
     ~azure.mgmt.web.models.KeyVaultSecretStatus
    """

    _validation = {
        'provisioning_state': {'readonly': True},
    }

    _attribute_map = {
        'key_vault_id': {'key': 'keyVaultId', 'type': 'str'},
        'key_vault_secret_name': {'key': 'keyVaultSecretName', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'KeyVaultSecretStatus'},
    }

    def __init__(self, **kwargs):
        super(AppServiceCertificate, self).__init__(**kwargs)
        self.key_vault_id = kwargs.get('key_vault_id')
        self.key_vault_secret_name = kwargs.get('key_vault_secret_name')
        # Reported by the service; never client-settable.
        self.provisioning_state = None
class Resource(Model):
    """Azure resource tracked in Azure Resource Manager.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :param location: Required. Resource Location.
    :type location: str
    :ivar type: Resource type.
    :vartype type: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'location': {'required': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(Resource, self).__init__(**kwargs)
        # id/name/type are read-only and always start as None.
        self.id = None
        self.name = None
        self.kind = kwargs.get('kind')
        self.location = kwargs.get('location')
        self.type = None
        self.tags = kwargs.get('tags')
class AppServiceCertificateOrder(Resource):
    """An SSL certificate purchase order.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :param location: Required. Resource Location.
    :type location: str
    :ivar type: Resource type.
    :vartype type: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param certificates: State of the Key Vault secret.
    :type certificates: dict[str,
     ~azure.mgmt.web.models.AppServiceCertificate]
    :param distinguished_name: Certificate distinguished name.
    :type distinguished_name: str
    :ivar domain_verification_token: Domain verification token.
    :vartype domain_verification_token: str
    :param validity_in_years: Duration in years (must be between 1 and 3).
     Default value: 1 .
    :type validity_in_years: int
    :param key_size: Certificate key size. Default value: 2048 .
    :type key_size: int
    :param product_type: Required. Certificate product type. Possible values
     include: 'StandardDomainValidatedSsl',
     'StandardDomainValidatedWildCardSsl'
    :type product_type: str or ~azure.mgmt.web.models.CertificateProductType
    :param auto_renew: <code>true</code> if the certificate should be
     automatically renewed when it expires; otherwise, <code>false</code>.
     Default value: True .
    :type auto_renew: bool
    :ivar provisioning_state: Status of certificate order. Possible values
     include: 'Succeeded', 'Failed', 'Canceled', 'InProgress', 'Deleting'
    :vartype provisioning_state: str or
     ~azure.mgmt.web.models.ProvisioningState
    :ivar status: Current order status. Possible values include:
     'Pendingissuance', 'Issued', 'Revoked', 'Canceled', 'Denied',
     'Pendingrevocation', 'PendingRekey', 'Unused', 'Expired', 'NotSubmitted'
    :vartype status: str or ~azure.mgmt.web.models.CertificateOrderStatus
    :ivar signed_certificate: Signed certificate.
    :vartype signed_certificate: ~azure.mgmt.web.models.CertificateDetails
    :param csr: Last CSR that was created for this order.
    :type csr: str
    :ivar intermediate: Intermediate certificate.
    :vartype intermediate: ~azure.mgmt.web.models.CertificateDetails
    :ivar root: Root certificate.
    :vartype root: ~azure.mgmt.web.models.CertificateDetails
    :ivar serial_number: Current serial number of the certificate.
    :vartype serial_number: str
    :ivar last_certificate_issuance_time: Certificate last issuance time.
    :vartype last_certificate_issuance_time: datetime
    :ivar expiration_time: Certificate expiration time.
    :vartype expiration_time: datetime
    :ivar is_private_key_external: <code>true</code> if private key is
     external; otherwise, <code>false</code>.
    :vartype is_private_key_external: bool
    :ivar app_service_certificate_not_renewable_reasons: Reasons why App
     Service Certificate is not renewable at the current moment.
    :vartype app_service_certificate_not_renewable_reasons: list[str]
    :ivar next_auto_renewal_time_stamp: Time stamp when the certificate would
     be auto renewed next
    :vartype next_auto_renewal_time_stamp: datetime
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'location': {'required': True},
        'type': {'readonly': True},
        'domain_verification_token': {'readonly': True},
        'validity_in_years': {'maximum': 3, 'minimum': 1},
        'product_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'status': {'readonly': True},
        'signed_certificate': {'readonly': True},
        'intermediate': {'readonly': True},
        'root': {'readonly': True},
        'serial_number': {'readonly': True},
        'last_certificate_issuance_time': {'readonly': True},
        'expiration_time': {'readonly': True},
        'is_private_key_external': {'readonly': True},
        'app_service_certificate_not_renewable_reasons': {'readonly': True},
        'next_auto_renewal_time_stamp': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'certificates': {'key': 'properties.certificates', 'type': '{AppServiceCertificate}'},
        'distinguished_name': {'key': 'properties.distinguishedName', 'type': 'str'},
        'domain_verification_token': {'key': 'properties.domainVerificationToken', 'type': 'str'},
        'validity_in_years': {'key': 'properties.validityInYears', 'type': 'int'},
        'key_size': {'key': 'properties.keySize', 'type': 'int'},
        'product_type': {'key': 'properties.productType', 'type': 'CertificateProductType'},
        'auto_renew': {'key': 'properties.autoRenew', 'type': 'bool'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'},
        'status': {'key': 'properties.status', 'type': 'CertificateOrderStatus'},
        'signed_certificate': {'key': 'properties.signedCertificate', 'type': 'CertificateDetails'},
        'csr': {'key': 'properties.csr', 'type': 'str'},
        'intermediate': {'key': 'properties.intermediate', 'type': 'CertificateDetails'},
        'root': {'key': 'properties.root', 'type': 'CertificateDetails'},
        'serial_number': {'key': 'properties.serialNumber', 'type': 'str'},
        'last_certificate_issuance_time': {'key': 'properties.lastCertificateIssuanceTime', 'type': 'iso-8601'},
        'expiration_time': {'key': 'properties.expirationTime', 'type': 'iso-8601'},
        'is_private_key_external': {'key': 'properties.isPrivateKeyExternal', 'type': 'bool'},
        'app_service_certificate_not_renewable_reasons': {'key': 'properties.appServiceCertificateNotRenewableReasons', 'type': '[str]'},
        'next_auto_renewal_time_stamp': {'key': 'properties.nextAutoRenewalTimeStamp', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        super(AppServiceCertificateOrder, self).__init__(**kwargs)
        # Caller-settable fields; defaults mirror the swagger definition.
        self.certificates = kwargs.get('certificates')
        self.distinguished_name = kwargs.get('distinguished_name')
        self.validity_in_years = kwargs.get('validity_in_years', 1)
        self.key_size = kwargs.get('key_size', 2048)
        self.product_type = kwargs.get('product_type')
        self.auto_renew = kwargs.get('auto_renew', True)
        self.csr = kwargs.get('csr')
        # Server-populated, read-only fields.
        self.domain_verification_token = None
        self.provisioning_state = None
        self.status = None
        self.signed_certificate = None
        self.intermediate = None
        self.root = None
        self.serial_number = None
        self.last_certificate_issuance_time = None
        self.expiration_time = None
        self.is_private_key_external = None
        self.app_service_certificate_not_renewable_reasons = None
        self.next_auto_renewal_time_stamp = None
class AppServiceCertificateOrderPatchResource(ProxyOnlyResource):
    """ARM resource for a certificate order that is purchased through Azure.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param certificates: State of the Key Vault secret.
    :type certificates: dict[str,
     ~azure.mgmt.web.models.AppServiceCertificate]
    :param distinguished_name: Certificate distinguished name.
    :type distinguished_name: str
    :ivar domain_verification_token: Domain verification token.
    :vartype domain_verification_token: str
    :param validity_in_years: Duration in years (must be between 1 and 3).
     Default value: 1 .
    :type validity_in_years: int
    :param key_size: Certificate key size. Default value: 2048 .
    :type key_size: int
    :param product_type: Required. Certificate product type. Possible values
     include: 'StandardDomainValidatedSsl',
     'StandardDomainValidatedWildCardSsl'
    :type product_type: str or ~azure.mgmt.web.models.CertificateProductType
    :param auto_renew: <code>true</code> if the certificate should be
     automatically renewed when it expires; otherwise, <code>false</code>.
     Default value: True .
    :type auto_renew: bool
    :ivar provisioning_state: Status of certificate order. Possible values
     include: 'Succeeded', 'Failed', 'Canceled', 'InProgress', 'Deleting'
    :vartype provisioning_state: str or
     ~azure.mgmt.web.models.ProvisioningState
    :ivar status: Current order status. Possible values include:
     'Pendingissuance', 'Issued', 'Revoked', 'Canceled', 'Denied',
     'Pendingrevocation', 'PendingRekey', 'Unused', 'Expired', 'NotSubmitted'
    :vartype status: str or ~azure.mgmt.web.models.CertificateOrderStatus
    :ivar signed_certificate: Signed certificate.
    :vartype signed_certificate: ~azure.mgmt.web.models.CertificateDetails
    :param csr: Last CSR that was created for this order.
    :type csr: str
    :ivar intermediate: Intermediate certificate.
    :vartype intermediate: ~azure.mgmt.web.models.CertificateDetails
    :ivar root: Root certificate.
    :vartype root: ~azure.mgmt.web.models.CertificateDetails
    :ivar serial_number: Current serial number of the certificate.
    :vartype serial_number: str
    :ivar last_certificate_issuance_time: Certificate last issuance time.
    :vartype last_certificate_issuance_time: datetime
    :ivar expiration_time: Certificate expiration time.
    :vartype expiration_time: datetime
    :ivar is_private_key_external: <code>true</code> if private key is
     external; otherwise, <code>false</code>.
    :vartype is_private_key_external: bool
    :ivar app_service_certificate_not_renewable_reasons: Reasons why App
     Service Certificate is not renewable at the current moment.
    :vartype app_service_certificate_not_renewable_reasons: list[str]
    :ivar next_auto_renewal_time_stamp: Time stamp when the certificate would
     be auto renewed next
    :vartype next_auto_renewal_time_stamp: datetime
    """
    # msrest validation rules: 'readonly' attributes are server-populated
    # (never serialized on a request); 'required' attributes must be set
    # before sending; 'maximum'/'minimum' bound numeric values.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'domain_verification_token': {'readonly': True},
        'validity_in_years': {'maximum': 3, 'minimum': 1},
        'product_type': {'required': True},
        'provisioning_state': {'readonly': True},
        'status': {'readonly': True},
        'signed_certificate': {'readonly': True},
        'intermediate': {'readonly': True},
        'root': {'readonly': True},
        'serial_number': {'readonly': True},
        'last_certificate_issuance_time': {'readonly': True},
        'expiration_time': {'readonly': True},
        'is_private_key_external': {'readonly': True},
        'app_service_certificate_not_renewable_reasons': {'readonly': True},
        'next_auto_renewal_time_stamp': {'readonly': True},
    }
    # Wire-format map used by msrest (de)serialization: attribute name ->
    # JSON key path (nested under the ARM "properties" envelope) and type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'certificates': {'key': 'properties.certificates', 'type': '{AppServiceCertificate}'},
        'distinguished_name': {'key': 'properties.distinguishedName', 'type': 'str'},
        'domain_verification_token': {'key': 'properties.domainVerificationToken', 'type': 'str'},
        'validity_in_years': {'key': 'properties.validityInYears', 'type': 'int'},
        'key_size': {'key': 'properties.keySize', 'type': 'int'},
        'product_type': {'key': 'properties.productType', 'type': 'CertificateProductType'},
        'auto_renew': {'key': 'properties.autoRenew', 'type': 'bool'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'},
        'status': {'key': 'properties.status', 'type': 'CertificateOrderStatus'},
        'signed_certificate': {'key': 'properties.signedCertificate', 'type': 'CertificateDetails'},
        'csr': {'key': 'properties.csr', 'type': 'str'},
        'intermediate': {'key': 'properties.intermediate', 'type': 'CertificateDetails'},
        'root': {'key': 'properties.root', 'type': 'CertificateDetails'},
        'serial_number': {'key': 'properties.serialNumber', 'type': 'str'},
        'last_certificate_issuance_time': {'key': 'properties.lastCertificateIssuanceTime', 'type': 'iso-8601'},
        'expiration_time': {'key': 'properties.expirationTime', 'type': 'iso-8601'},
        'is_private_key_external': {'key': 'properties.isPrivateKeyExternal', 'type': 'bool'},
        'app_service_certificate_not_renewable_reasons': {'key': 'properties.appServiceCertificateNotRenewableReasons', 'type': '[str]'},
        'next_auto_renewal_time_stamp': {'key': 'properties.nextAutoRenewalTimeStamp', 'type': 'iso-8601'},
    }
    def __init__(self, **kwargs):
        """Initialize from keyword arguments; read-only (server-populated)
        attributes are reset to ``None``."""
        super(AppServiceCertificateOrderPatchResource, self).__init__(**kwargs)
        self.certificates = kwargs.get('certificates', None)
        self.distinguished_name = kwargs.get('distinguished_name', None)
        self.domain_verification_token = None
        self.validity_in_years = kwargs.get('validity_in_years', 1)
        self.key_size = kwargs.get('key_size', 2048)
        self.product_type = kwargs.get('product_type', None)
        self.auto_renew = kwargs.get('auto_renew', True)
        self.provisioning_state = None
        self.status = None
        self.signed_certificate = None
        self.csr = kwargs.get('csr', None)
        self.intermediate = None
        self.root = None
        self.serial_number = None
        self.last_certificate_issuance_time = None
        self.expiration_time = None
        self.is_private_key_external = None
        self.app_service_certificate_not_renewable_reasons = None
        self.next_auto_renewal_time_stamp = None
class AppServiceCertificatePatchResource(ProxyOnlyResource):
    """Proxy-only ARM resource holding the Key Vault reference for a
    certificate purchased through Azure.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param key_vault_id: Key Vault resource Id.
    :type key_vault_id: str
    :param key_vault_secret_name: Key Vault secret name.
    :type key_vault_secret_name: str
    :ivar provisioning_state: Status of the Key Vault secret. Possible values
     include: 'Initialized', 'WaitingOnCertificateOrder', 'Succeeded',
     'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault',
     'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist',
     'KeyVaultSecretDoesNotExist', 'UnknownError', 'ExternalPrivateKey',
     'Unknown'
    :vartype provisioning_state: str or
     ~azure.mgmt.web.models.KeyVaultSecretStatus
    """
    # Read-only attributes are populated by the service and skipped when
    # serializing requests.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'provisioning_state': {'readonly': True},
    }
    # Attribute -> JSON key path / type map consumed by msrest.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'key_vault_id': {'key': 'properties.keyVaultId', 'type': 'str'},
        'key_vault_secret_name': {'key': 'properties.keyVaultSecretName', 'type': 'str'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'KeyVaultSecretStatus'},
    }
    def __init__(self, **kwargs):
        """Initialize the Key Vault reference from keyword arguments."""
        super(AppServiceCertificatePatchResource, self).__init__(**kwargs)
        # Client-settable Key Vault reference.
        self.key_vault_id = kwargs.get('key_vault_id')
        self.key_vault_secret_name = kwargs.get('key_vault_secret_name')
        # Populated by the service only.
        self.provisioning_state = None
class AppServiceCertificateResource(Resource):
    """Key Vault container ARM resource for a certificate that is purchased
    through Azure.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :param location: Required. Resource Location.
    :type location: str
    :ivar type: Resource type.
    :vartype type: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param key_vault_id: Key Vault resource Id.
    :type key_vault_id: str
    :param key_vault_secret_name: Key Vault secret name.
    :type key_vault_secret_name: str
    :ivar provisioning_state: Status of the Key Vault secret. Possible values
     include: 'Initialized', 'WaitingOnCertificateOrder', 'Succeeded',
     'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault',
     'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist',
     'KeyVaultSecretDoesNotExist', 'UnknownError', 'ExternalPrivateKey',
     'Unknown'
    :vartype provisioning_state: str or
     ~azure.mgmt.web.models.KeyVaultSecretStatus
    """
    # msrest validation rules: 'readonly' attributes are server-populated;
    # 'required' attributes must be set before sending.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'location': {'required': True},
        'type': {'readonly': True},
        'provisioning_state': {'readonly': True},
    }
    # Wire-format map used by msrest (de)serialization: attribute name ->
    # JSON key path (nested under the ARM "properties" envelope) and type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'key_vault_id': {'key': 'properties.keyVaultId', 'type': 'str'},
        'key_vault_secret_name': {'key': 'properties.keyVaultSecretName', 'type': 'str'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'KeyVaultSecretStatus'},
    }
    def __init__(self, **kwargs):
        """Initialize from keyword arguments; the read-only provisioning
        state is reset to ``None``."""
        super(AppServiceCertificateResource, self).__init__(**kwargs)
        self.key_vault_id = kwargs.get('key_vault_id', None)
        self.key_vault_secret_name = kwargs.get('key_vault_secret_name', None)
        self.provisioning_state = None
class AppServiceEnvironment(Model):
    """Description of an App Service Environment.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :param name: Required. Name of the App Service Environment.
    :type name: str
    :param location: Required. Location of the App Service Environment, e.g.
     "West US".
    :type location: str
    :ivar provisioning_state: Provisioning state of the App Service
     Environment. Possible values include: 'Succeeded', 'Failed', 'Canceled',
     'InProgress', 'Deleting'
    :vartype provisioning_state: str or
     ~azure.mgmt.web.models.ProvisioningState
    :ivar status: Current status of the App Service Environment. Possible
     values include: 'Preparing', 'Ready', 'Scaling', 'Deleting'
    :vartype status: str or ~azure.mgmt.web.models.HostingEnvironmentStatus
    :param vnet_name: Name of the Virtual Network for the App Service
     Environment.
    :type vnet_name: str
    :param vnet_resource_group_name: Resource group of the Virtual Network.
    :type vnet_resource_group_name: str
    :param vnet_subnet_name: Subnet of the Virtual Network.
    :type vnet_subnet_name: str
    :param virtual_network: Required. Description of the Virtual Network.
    :type virtual_network: ~azure.mgmt.web.models.VirtualNetworkProfile
    :param internal_load_balancing_mode: Specifies which endpoints to serve
     internally in the Virtual Network for the App Service Environment.
     Possible values include: 'None', 'Web', 'Publishing'
    :type internal_load_balancing_mode: str or
     ~azure.mgmt.web.models.InternalLoadBalancingMode
    :param multi_size: Front-end VM size, e.g. "Medium", "Large".
    :type multi_size: str
    :param multi_role_count: Number of front-end instances.
    :type multi_role_count: int
    :param worker_pools: Required. Description of worker pools with worker
     size IDs, VM sizes, and number of workers in each pool.
    :type worker_pools: list[~azure.mgmt.web.models.WorkerPool]
    :param ipssl_address_count: Number of IP SSL addresses reserved for the
     App Service Environment.
    :type ipssl_address_count: int
    :ivar database_edition: Edition of the metadata database for the App
     Service Environment, e.g. "Standard".
    :vartype database_edition: str
    :ivar database_service_objective: Service objective of the metadata
     database for the App Service Environment, e.g. "S0".
    :vartype database_service_objective: str
    :ivar upgrade_domains: Number of upgrade domains of the App Service
     Environment.
    :vartype upgrade_domains: int
    :ivar subscription_id: Subscription of the App Service Environment.
    :vartype subscription_id: str
    :param dns_suffix: DNS suffix of the App Service Environment.
    :type dns_suffix: str
    :ivar last_action: Last deployment action on the App Service Environment.
    :vartype last_action: str
    :ivar last_action_result: Result of the last deployment action on the App
     Service Environment.
    :vartype last_action_result: str
    :ivar allowed_multi_sizes: List of comma separated strings describing
     which VM sizes are allowed for front-ends.
    :vartype allowed_multi_sizes: str
    :ivar allowed_worker_sizes: List of comma separated strings describing
     which VM sizes are allowed for workers.
    :vartype allowed_worker_sizes: str
    :ivar maximum_number_of_machines: Maximum number of VMs in the App Service
     Environment.
    :vartype maximum_number_of_machines: int
    :ivar vip_mappings: Description of IP SSL mapping for the App Service
     Environment.
    :vartype vip_mappings: list[~azure.mgmt.web.models.VirtualIPMapping]
    :ivar environment_capacities: Current total, used, and available worker
     capacities.
    :vartype environment_capacities:
     list[~azure.mgmt.web.models.StampCapacity]
    :param network_access_control_list: Access control list for controlling
     traffic to the App Service Environment.
    :type network_access_control_list:
     list[~azure.mgmt.web.models.NetworkAccessControlEntry]
    :ivar environment_is_healthy: True/false indicating whether the App
     Service Environment is healthy.
    :vartype environment_is_healthy: bool
    :ivar environment_status: Detailed message about the results of the last
     check of the App Service Environment.
    :vartype environment_status: str
    :ivar resource_group: Resource group of the App Service Environment.
    :vartype resource_group: str
    :param front_end_scale_factor: Scale factor for front-ends.
    :type front_end_scale_factor: int
    :ivar default_front_end_scale_factor: Default Scale Factor for FrontEnds.
    :vartype default_front_end_scale_factor: int
    :param api_management_account_id: API Management Account associated with
     the App Service Environment.
    :type api_management_account_id: str
    :param suspended: <code>true</code> if the App Service Environment is
     suspended; otherwise, <code>false</code>. The environment can be
     suspended, e.g. when the management endpoint is no longer available
     (most likely because NSG blocked the incoming traffic).
    :type suspended: bool
    :param dynamic_cache_enabled: True/false indicating whether the App
     Service Environment is suspended. The environment can be suspended e.g.
     when the management endpoint is no longer available
     (most likely because NSG blocked the incoming traffic).
    :type dynamic_cache_enabled: bool
    :param cluster_settings: Custom settings for changing the behavior of the
     App Service Environment.
    :type cluster_settings: list[~azure.mgmt.web.models.NameValuePair]
    :param user_whitelisted_ip_ranges: User added ip ranges to whitelist on
     ASE db
    :type user_whitelisted_ip_ranges: list[str]
    :param has_linux_workers: Flag that displays whether an ASE has linux
     workers or not
    :type has_linux_workers: bool
    :param ssl_cert_key_vault_id: Key Vault ID for ILB App Service Environment
     default SSL certificate
    :type ssl_cert_key_vault_id: str
    :param ssl_cert_key_vault_secret_name: Key Vault Secret Name for ILB App
     Service Environment default SSL certificate
    :type ssl_cert_key_vault_secret_name: str
    """
    # msrest validation rules: 'readonly' attributes are server-populated
    # (never serialized on a request); 'required' attributes must be set
    # before sending.
    _validation = {
        'name': {'required': True},
        'location': {'required': True},
        'provisioning_state': {'readonly': True},
        'status': {'readonly': True},
        'virtual_network': {'required': True},
        'worker_pools': {'required': True},
        'database_edition': {'readonly': True},
        'database_service_objective': {'readonly': True},
        'upgrade_domains': {'readonly': True},
        'subscription_id': {'readonly': True},
        'last_action': {'readonly': True},
        'last_action_result': {'readonly': True},
        'allowed_multi_sizes': {'readonly': True},
        'allowed_worker_sizes': {'readonly': True},
        'maximum_number_of_machines': {'readonly': True},
        'vip_mappings': {'readonly': True},
        'environment_capacities': {'readonly': True},
        'environment_is_healthy': {'readonly': True},
        'environment_status': {'readonly': True},
        'resource_group': {'readonly': True},
        'default_front_end_scale_factor': {'readonly': True},
    }
    # Wire-format map used by msrest (de)serialization. Unlike the ARM
    # resource wrappers above, this model is a bare payload: keys are
    # top-level, not nested under a "properties" envelope.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'provisioning_state': {'key': 'provisioningState', 'type': 'ProvisioningState'},
        'status': {'key': 'status', 'type': 'HostingEnvironmentStatus'},
        'vnet_name': {'key': 'vnetName', 'type': 'str'},
        'vnet_resource_group_name': {'key': 'vnetResourceGroupName', 'type': 'str'},
        'vnet_subnet_name': {'key': 'vnetSubnetName', 'type': 'str'},
        'virtual_network': {'key': 'virtualNetwork', 'type': 'VirtualNetworkProfile'},
        'internal_load_balancing_mode': {'key': 'internalLoadBalancingMode', 'type': 'InternalLoadBalancingMode'},
        'multi_size': {'key': 'multiSize', 'type': 'str'},
        'multi_role_count': {'key': 'multiRoleCount', 'type': 'int'},
        'worker_pools': {'key': 'workerPools', 'type': '[WorkerPool]'},
        'ipssl_address_count': {'key': 'ipsslAddressCount', 'type': 'int'},
        'database_edition': {'key': 'databaseEdition', 'type': 'str'},
        'database_service_objective': {'key': 'databaseServiceObjective', 'type': 'str'},
        'upgrade_domains': {'key': 'upgradeDomains', 'type': 'int'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'dns_suffix': {'key': 'dnsSuffix', 'type': 'str'},
        'last_action': {'key': 'lastAction', 'type': 'str'},
        'last_action_result': {'key': 'lastActionResult', 'type': 'str'},
        'allowed_multi_sizes': {'key': 'allowedMultiSizes', 'type': 'str'},
        'allowed_worker_sizes': {'key': 'allowedWorkerSizes', 'type': 'str'},
        'maximum_number_of_machines': {'key': 'maximumNumberOfMachines', 'type': 'int'},
        'vip_mappings': {'key': 'vipMappings', 'type': '[VirtualIPMapping]'},
        'environment_capacities': {'key': 'environmentCapacities', 'type': '[StampCapacity]'},
        'network_access_control_list': {'key': 'networkAccessControlList', 'type': '[NetworkAccessControlEntry]'},
        'environment_is_healthy': {'key': 'environmentIsHealthy', 'type': 'bool'},
        'environment_status': {'key': 'environmentStatus', 'type': 'str'},
        'resource_group': {'key': 'resourceGroup', 'type': 'str'},
        'front_end_scale_factor': {'key': 'frontEndScaleFactor', 'type': 'int'},
        'default_front_end_scale_factor': {'key': 'defaultFrontEndScaleFactor', 'type': 'int'},
        'api_management_account_id': {'key': 'apiManagementAccountId', 'type': 'str'},
        'suspended': {'key': 'suspended', 'type': 'bool'},
        'dynamic_cache_enabled': {'key': 'dynamicCacheEnabled', 'type': 'bool'},
        'cluster_settings': {'key': 'clusterSettings', 'type': '[NameValuePair]'},
        'user_whitelisted_ip_ranges': {'key': 'userWhitelistedIpRanges', 'type': '[str]'},
        'has_linux_workers': {'key': 'hasLinuxWorkers', 'type': 'bool'},
        'ssl_cert_key_vault_id': {'key': 'sslCertKeyVaultId', 'type': 'str'},
        'ssl_cert_key_vault_secret_name': {'key': 'sslCertKeyVaultSecretName', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        """Initialize from keyword arguments; every read-only
        (server-populated) attribute is reset to ``None``."""
        super(AppServiceEnvironment, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.location = kwargs.get('location', None)
        self.provisioning_state = None
        self.status = None
        self.vnet_name = kwargs.get('vnet_name', None)
        self.vnet_resource_group_name = kwargs.get('vnet_resource_group_name', None)
        self.vnet_subnet_name = kwargs.get('vnet_subnet_name', None)
        self.virtual_network = kwargs.get('virtual_network', None)
        self.internal_load_balancing_mode = kwargs.get('internal_load_balancing_mode', None)
        self.multi_size = kwargs.get('multi_size', None)
        self.multi_role_count = kwargs.get('multi_role_count', None)
        self.worker_pools = kwargs.get('worker_pools', None)
        self.ipssl_address_count = kwargs.get('ipssl_address_count', None)
        self.database_edition = None
        self.database_service_objective = None
        self.upgrade_domains = None
        self.subscription_id = None
        self.dns_suffix = kwargs.get('dns_suffix', None)
        self.last_action = None
        self.last_action_result = None
        self.allowed_multi_sizes = None
        self.allowed_worker_sizes = None
        self.maximum_number_of_machines = None
        self.vip_mappings = None
        self.environment_capacities = None
        self.network_access_control_list = kwargs.get('network_access_control_list', None)
        self.environment_is_healthy = None
        self.environment_status = None
        self.resource_group = None
        self.front_end_scale_factor = kwargs.get('front_end_scale_factor', None)
        self.default_front_end_scale_factor = None
        self.api_management_account_id = kwargs.get('api_management_account_id', None)
        self.suspended = kwargs.get('suspended', None)
        self.dynamic_cache_enabled = kwargs.get('dynamic_cache_enabled', None)
        self.cluster_settings = kwargs.get('cluster_settings', None)
        self.user_whitelisted_ip_ranges = kwargs.get('user_whitelisted_ip_ranges', None)
        self.has_linux_workers = kwargs.get('has_linux_workers', None)
        self.ssl_cert_key_vault_id = kwargs.get('ssl_cert_key_vault_id', None)
        self.ssl_cert_key_vault_secret_name = kwargs.get('ssl_cert_key_vault_secret_name', None)
class AppServiceEnvironmentPatchResource(ProxyOnlyResource):
"""ARM resource for a app service environment.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param app_service_environment_patch_resource_name: Required. Name of the
App Service Environment.
:type app_service_environment_patch_resource_name: str
:param location: Required. Location of the App Service Environment, e.g.
"West US".
:type location: str
:ivar provisioning_state: Provisioning state of the App Service
Environment. Possible values include: 'Succeeded', 'Failed', 'Canceled',
'InProgress', 'Deleting'
:vartype provisioning_state: str or
~azure.mgmt.web.models.ProvisioningState
:ivar status: Current status of the App Service Environment. Possible
values include: 'Preparing', 'Ready', 'Scaling', 'Deleting'
:vartype status: str or ~azure.mgmt.web.models.HostingEnvironmentStatus
:param vnet_name: Name of the Virtual Network for the App Service
Environment.
:type vnet_name: str
:param vnet_resource_group_name: Resource group of the Virtual Network.
:type vnet_resource_group_name: str
:param vnet_subnet_name: Subnet of the Virtual Network.
:type vnet_subnet_name: str
:param virtual_network: Required. Description of the Virtual Network.
:type virtual_network: ~azure.mgmt.web.models.VirtualNetworkProfile
:param internal_load_balancing_mode: Specifies which endpoints to serve
internally in the Virtual Network for the App Service Environment.
Possible values include: 'None', 'Web', 'Publishing'
:type internal_load_balancing_mode: str or
~azure.mgmt.web.models.InternalLoadBalancingMode
:param multi_size: Front-end VM size, e.g. "Medium", "Large".
:type multi_size: str
:param multi_role_count: Number of front-end instances.
:type multi_role_count: int
:param worker_pools: Required. Description of worker pools with worker
size IDs, VM sizes, and number of workers in each pool.
:type worker_pools: list[~azure.mgmt.web.models.WorkerPool]
:param ipssl_address_count: Number of IP SSL addresses reserved for the
App Service Environment.
:type ipssl_address_count: int
:ivar database_edition: Edition of the metadata database for the App
Service Environment, e.g. "Standard".
:vartype database_edition: str
:ivar database_service_objective: Service objective of the metadata
database for the App Service Environment, e.g. "S0".
:vartype database_service_objective: str
:ivar upgrade_domains: Number of upgrade domains of the App Service
Environment.
:vartype upgrade_domains: int
:ivar subscription_id: Subscription of the App Service Environment.
:vartype subscription_id: str
:param dns_suffix: DNS suffix of the App Service Environment.
:type dns_suffix: str
:ivar last_action: Last deployment action on the App Service Environment.
:vartype last_action: str
:ivar last_action_result: Result of the last deployment action on the App
Service Environment.
:vartype last_action_result: str
:ivar allowed_multi_sizes: List of comma separated strings describing
which VM sizes are allowed for front-ends.
:vartype allowed_multi_sizes: str
:ivar allowed_worker_sizes: List of comma separated strings describing
which VM sizes are allowed for workers.
:vartype allowed_worker_sizes: str
:ivar maximum_number_of_machines: Maximum number of VMs in the App Service
Environment.
:vartype maximum_number_of_machines: int
:ivar vip_mappings: Description of IP SSL mapping for the App Service
Environment.
:vartype vip_mappings: list[~azure.mgmt.web.models.VirtualIPMapping]
:ivar environment_capacities: Current total, used, and available worker
capacities.
:vartype environment_capacities:
list[~azure.mgmt.web.models.StampCapacity]
:param network_access_control_list: Access control list for controlling
traffic to the App Service Environment.
:type network_access_control_list:
list[~azure.mgmt.web.models.NetworkAccessControlEntry]
:ivar environment_is_healthy: True/false indicating whether the App
Service Environment is healthy.
:vartype environment_is_healthy: bool
:ivar environment_status: Detailed message about with results of the last
check of the App Service Environment.
:vartype environment_status: str
:ivar resource_group: Resource group of the App Service Environment.
:vartype resource_group: str
:param front_end_scale_factor: Scale factor for front-ends.
:type front_end_scale_factor: int
:ivar default_front_end_scale_factor: Default Scale Factor for FrontEnds.
:vartype default_front_end_scale_factor: int
:param api_management_account_id: API Management Account associated with
the App Service Environment.
:type api_management_account_id: str
:param suspended: <code>true</code> if the App Service Environment is
suspended; otherwise, <code>false</code>. The environment can be
suspended, e.g. when the management endpoint is no longer available
(most likely because NSG blocked the incoming traffic).
:type suspended: bool
:param dynamic_cache_enabled: True/false indicating whether the App
Service Environment is suspended. The environment can be suspended e.g.
when the management endpoint is no longer available
(most likely because NSG blocked the incoming traffic).
:type dynamic_cache_enabled: bool
:param cluster_settings: Custom settings for changing the behavior of the
App Service Environment.
:type cluster_settings: list[~azure.mgmt.web.models.NameValuePair]
:param user_whitelisted_ip_ranges: User added ip ranges to whitelist on
ASE db
:type user_whitelisted_ip_ranges: list[str]
:param has_linux_workers: Flag that displays whether an ASE has linux
workers or not
:type has_linux_workers: bool
:param ssl_cert_key_vault_id: Key Vault ID for ILB App Service Environment
default SSL certificate
:type ssl_cert_key_vault_id: str
:param ssl_cert_key_vault_secret_name: Key Vault Secret Name for ILB App
Service Environment default SSL certificate
:type ssl_cert_key_vault_secret_name: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'app_service_environment_patch_resource_name': {'required': True},
'location': {'required': True},
'provisioning_state': {'readonly': True},
'status': {'readonly': True},
'virtual_network': {'required': True},
'worker_pools': {'required': True},
'database_edition': {'readonly': True},
'database_service_objective': {'readonly': True},
'upgrade_domains': {'readonly': True},
'subscription_id': {'readonly': True},
'last_action': {'readonly': True},
'last_action_result': {'readonly': True},
'allowed_multi_sizes': {'readonly': True},
'allowed_worker_sizes': {'readonly': True},
'maximum_number_of_machines': {'readonly': True},
'vip_mappings': {'readonly': True},
'environment_capacities': {'readonly': True},
'environment_is_healthy': {'readonly': True},
'environment_status': {'readonly': True},
'resource_group': {'readonly': True},
'default_front_end_scale_factor': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'app_service_environment_patch_resource_name': {'key': 'properties.name', 'type': 'str'},
'location': {'key': 'properties.location', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'},
'status': {'key': 'properties.status', 'type': 'HostingEnvironmentStatus'},
'vnet_name': {'key': 'properties.vnetName', 'type': 'str'},
'vnet_resource_group_name': {'key': 'properties.vnetResourceGroupName', 'type': 'str'},
'vnet_subnet_name': {'key': 'properties.vnetSubnetName', 'type': 'str'},
'virtual_network': {'key': 'properties.virtualNetwork', 'type': 'VirtualNetworkProfile'},
'internal_load_balancing_mode': {'key': 'properties.internalLoadBalancingMode', 'type': 'InternalLoadBalancingMode'},
'multi_size': {'key': 'properties.multiSize', 'type': 'str'},
'multi_role_count': {'key': 'properties.multiRoleCount', 'type': 'int'},
'worker_pools': {'key': 'properties.workerPools', 'type': '[WorkerPool]'},
'ipssl_address_count': {'key': 'properties.ipsslAddressCount', 'type': 'int'},
'database_edition': {'key': 'properties.databaseEdition', 'type': 'str'},
'database_service_objective': {'key': 'properties.databaseServiceObjective', 'type': 'str'},
'upgrade_domains': {'key': 'properties.upgradeDomains', 'type': 'int'},
'subscription_id': {'key': 'properties.subscriptionId', 'type': 'str'},
'dns_suffix': {'key': 'properties.dnsSuffix', 'type': 'str'},
'last_action': {'key': 'properties.lastAction', 'type': 'str'},
'last_action_result': {'key': 'properties.lastActionResult', 'type': 'str'},
'allowed_multi_sizes': {'key': 'properties.allowedMultiSizes', 'type': 'str'},
'allowed_worker_sizes': {'key': 'properties.allowedWorkerSizes', 'type': 'str'},
'maximum_number_of_machines': {'key': 'properties.maximumNumberOfMachines', 'type': 'int'},
'vip_mappings': {'key': 'properties.vipMappings', 'type': '[VirtualIPMapping]'},
'environment_capacities': {'key': 'properties.environmentCapacities', 'type': '[StampCapacity]'},
'network_access_control_list': {'key': 'properties.networkAccessControlList', 'type': '[NetworkAccessControlEntry]'},
'environment_is_healthy': {'key': 'properties.environmentIsHealthy', 'type': 'bool'},
'environment_status': {'key': 'properties.environmentStatus', 'type': 'str'},
'resource_group': {'key': 'properties.resourceGroup', 'type': 'str'},
'front_end_scale_factor': {'key': 'properties.frontEndScaleFactor', 'type': 'int'},
'default_front_end_scale_factor': {'key': 'properties.defaultFrontEndScaleFactor', 'type': 'int'},
'api_management_account_id': {'key': 'properties.apiManagementAccountId', 'type': 'str'},
'suspended': {'key': 'properties.suspended', 'type': 'bool'},
'dynamic_cache_enabled': {'key': 'properties.dynamicCacheEnabled', 'type': 'bool'},
'cluster_settings': {'key': 'properties.clusterSettings', 'type': '[NameValuePair]'},
'user_whitelisted_ip_ranges': {'key': 'properties.userWhitelistedIpRanges', 'type': '[str]'},
'has_linux_workers': {'key': 'properties.hasLinuxWorkers', 'type': 'bool'},
'ssl_cert_key_vault_id': {'key': 'properties.sslCertKeyVaultId', 'type': 'str'},
'ssl_cert_key_vault_secret_name': {'key': 'properties.sslCertKeyVaultSecretName', 'type': 'str'},
}
def __init__(self, **kwargs):
    """Populate writable fields from *kwargs*; server-owned fields start as None."""
    super(AppServiceEnvironmentPatchResource, self).__init__(**kwargs)
    # Caller-settable properties: pulled straight from kwargs (default None).
    for _name in (
        'app_service_environment_patch_resource_name',
        'location',
        'vnet_name',
        'vnet_resource_group_name',
        'vnet_subnet_name',
        'virtual_network',
        'internal_load_balancing_mode',
        'multi_size',
        'multi_role_count',
        'worker_pools',
        'ipssl_address_count',
        'dns_suffix',
        'network_access_control_list',
        'front_end_scale_factor',
        'api_management_account_id',
        'suspended',
        'dynamic_cache_enabled',
        'cluster_settings',
        'user_whitelisted_ip_ranges',
        'has_linux_workers',
        'ssl_cert_key_vault_id',
        'ssl_cert_key_vault_secret_name',
    ):
        setattr(self, _name, kwargs.get(_name))
    # Read-only properties: populated by the server and ignored in requests,
    # so they always begin life as None on the client side.
    for _name in (
        'provisioning_state',
        'status',
        'database_edition',
        'database_service_objective',
        'upgrade_domains',
        'subscription_id',
        'last_action',
        'last_action_result',
        'allowed_multi_sizes',
        'allowed_worker_sizes',
        'maximum_number_of_machines',
        'vip_mappings',
        'environment_capacities',
        'environment_is_healthy',
        'environment_status',
        'resource_group',
        'default_front_end_scale_factor',
    ):
        setattr(self, _name, None)
class AppServiceEnvironmentResource(Resource):
    """App Service Environment ARM resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :param location: Required. Resource Location.
    :type location: str
    :ivar type: Resource type.
    :vartype type: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param app_service_environment_resource_name: Required. Name of the App
     Service Environment.
    :type app_service_environment_resource_name: str
    :param app_service_environment_resource_location: Required. Location of
     the App Service Environment, e.g. "West US".
    :type app_service_environment_resource_location: str
    :ivar provisioning_state: Provisioning state of the App Service
     Environment. Possible values include: 'Succeeded', 'Failed', 'Canceled',
     'InProgress', 'Deleting'
    :vartype provisioning_state: str or
     ~azure.mgmt.web.models.ProvisioningState
    :ivar status: Current status of the App Service Environment. Possible
     values include: 'Preparing', 'Ready', 'Scaling', 'Deleting'
    :vartype status: str or ~azure.mgmt.web.models.HostingEnvironmentStatus
    :param vnet_name: Name of the Virtual Network for the App Service
     Environment.
    :type vnet_name: str
    :param vnet_resource_group_name: Resource group of the Virtual Network.
    :type vnet_resource_group_name: str
    :param vnet_subnet_name: Subnet of the Virtual Network.
    :type vnet_subnet_name: str
    :param virtual_network: Required. Description of the Virtual Network.
    :type virtual_network: ~azure.mgmt.web.models.VirtualNetworkProfile
    :param internal_load_balancing_mode: Specifies which endpoints to serve
     internally in the Virtual Network for the App Service Environment.
     Possible values include: 'None', 'Web', 'Publishing'
    :type internal_load_balancing_mode: str or
     ~azure.mgmt.web.models.InternalLoadBalancingMode
    :param multi_size: Front-end VM size, e.g. "Medium", "Large".
    :type multi_size: str
    :param multi_role_count: Number of front-end instances.
    :type multi_role_count: int
    :param worker_pools: Required. Description of worker pools with worker
     size IDs, VM sizes, and number of workers in each pool.
    :type worker_pools: list[~azure.mgmt.web.models.WorkerPool]
    :param ipssl_address_count: Number of IP SSL addresses reserved for the
     App Service Environment.
    :type ipssl_address_count: int
    :ivar database_edition: Edition of the metadata database for the App
     Service Environment, e.g. "Standard".
    :vartype database_edition: str
    :ivar database_service_objective: Service objective of the metadata
     database for the App Service Environment, e.g. "S0".
    :vartype database_service_objective: str
    :ivar upgrade_domains: Number of upgrade domains of the App Service
     Environment.
    :vartype upgrade_domains: int
    :ivar subscription_id: Subscription of the App Service Environment.
    :vartype subscription_id: str
    :param dns_suffix: DNS suffix of the App Service Environment.
    :type dns_suffix: str
    :ivar last_action: Last deployment action on the App Service Environment.
    :vartype last_action: str
    :ivar last_action_result: Result of the last deployment action on the App
     Service Environment.
    :vartype last_action_result: str
    :ivar allowed_multi_sizes: List of comma separated strings describing
     which VM sizes are allowed for front-ends.
    :vartype allowed_multi_sizes: str
    :ivar allowed_worker_sizes: List of comma separated strings describing
     which VM sizes are allowed for workers.
    :vartype allowed_worker_sizes: str
    :ivar maximum_number_of_machines: Maximum number of VMs in the App Service
     Environment.
    :vartype maximum_number_of_machines: int
    :ivar vip_mappings: Description of IP SSL mapping for the App Service
     Environment.
    :vartype vip_mappings: list[~azure.mgmt.web.models.VirtualIPMapping]
    :ivar environment_capacities: Current total, used, and available worker
     capacities.
    :vartype environment_capacities:
     list[~azure.mgmt.web.models.StampCapacity]
    :param network_access_control_list: Access control list for controlling
     traffic to the App Service Environment.
    :type network_access_control_list:
     list[~azure.mgmt.web.models.NetworkAccessControlEntry]
    :ivar environment_is_healthy: True/false indicating whether the App
     Service Environment is healthy.
    :vartype environment_is_healthy: bool
    :ivar environment_status: Detailed message about with results of the last
     check of the App Service Environment.
    :vartype environment_status: str
    :ivar resource_group: Resource group of the App Service Environment.
    :vartype resource_group: str
    :param front_end_scale_factor: Scale factor for front-ends.
    :type front_end_scale_factor: int
    :ivar default_front_end_scale_factor: Default Scale Factor for FrontEnds.
    :vartype default_front_end_scale_factor: int
    :param api_management_account_id: API Management Account associated with
     the App Service Environment.
    :type api_management_account_id: str
    :param suspended: <code>true</code> if the App Service Environment is
     suspended; otherwise, <code>false</code>. The environment can be
     suspended, e.g. when the management endpoint is no longer available
     (most likely because NSG blocked the incoming traffic).
    :type suspended: bool
    :param dynamic_cache_enabled: True/false indicating whether the App
     Service Environment is suspended. The environment can be suspended e.g.
     when the management endpoint is no longer available
     (most likely because NSG blocked the incoming traffic).
    :type dynamic_cache_enabled: bool
    :param cluster_settings: Custom settings for changing the behavior of the
     App Service Environment.
    :type cluster_settings: list[~azure.mgmt.web.models.NameValuePair]
    :param user_whitelisted_ip_ranges: User added ip ranges to whitelist on
     ASE db
    :type user_whitelisted_ip_ranges: list[str]
    :param has_linux_workers: Flag that displays whether an ASE has linux
     workers or not
    :type has_linux_workers: bool
    :param ssl_cert_key_vault_id: Key Vault ID for ILB App Service Environment
     default SSL certificate
    :type ssl_cert_key_vault_id: str
    :param ssl_cert_key_vault_secret_name: Key Vault Secret Name for ILB App
     Service Environment default SSL certificate
    :type ssl_cert_key_vault_secret_name: str
    """

    # msrest validation metadata: 'readonly' entries are server-populated,
    # 'required' entries must be supplied before a request can be sent.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'location': {'required': True},
        'type': {'readonly': True},
        'app_service_environment_resource_name': {'required': True},
        'app_service_environment_resource_location': {'required': True},
        'provisioning_state': {'readonly': True},
        'status': {'readonly': True},
        'virtual_network': {'required': True},
        'worker_pools': {'required': True},
        'database_edition': {'readonly': True},
        'database_service_objective': {'readonly': True},
        'upgrade_domains': {'readonly': True},
        'subscription_id': {'readonly': True},
        'last_action': {'readonly': True},
        'last_action_result': {'readonly': True},
        'allowed_multi_sizes': {'readonly': True},
        'allowed_worker_sizes': {'readonly': True},
        'maximum_number_of_machines': {'readonly': True},
        'vip_mappings': {'readonly': True},
        'environment_capacities': {'readonly': True},
        'environment_is_healthy': {'readonly': True},
        'environment_status': {'readonly': True},
        'resource_group': {'readonly': True},
        'default_front_end_scale_factor': {'readonly': True},
    }

    # Maps Python attribute names onto wire-format JSON paths and msrest types.
    # Entry order also drives serialization order, so it is kept as generated.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'app_service_environment_resource_name': {'key': 'properties.name', 'type': 'str'},
        'app_service_environment_resource_location': {'key': 'properties.location', 'type': 'str'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'},
        'status': {'key': 'properties.status', 'type': 'HostingEnvironmentStatus'},
        'vnet_name': {'key': 'properties.vnetName', 'type': 'str'},
        'vnet_resource_group_name': {'key': 'properties.vnetResourceGroupName', 'type': 'str'},
        'vnet_subnet_name': {'key': 'properties.vnetSubnetName', 'type': 'str'},
        'virtual_network': {'key': 'properties.virtualNetwork', 'type': 'VirtualNetworkProfile'},
        'internal_load_balancing_mode': {'key': 'properties.internalLoadBalancingMode', 'type': 'InternalLoadBalancingMode'},
        'multi_size': {'key': 'properties.multiSize', 'type': 'str'},
        'multi_role_count': {'key': 'properties.multiRoleCount', 'type': 'int'},
        'worker_pools': {'key': 'properties.workerPools', 'type': '[WorkerPool]'},
        'ipssl_address_count': {'key': 'properties.ipsslAddressCount', 'type': 'int'},
        'database_edition': {'key': 'properties.databaseEdition', 'type': 'str'},
        'database_service_objective': {'key': 'properties.databaseServiceObjective', 'type': 'str'},
        'upgrade_domains': {'key': 'properties.upgradeDomains', 'type': 'int'},
        'subscription_id': {'key': 'properties.subscriptionId', 'type': 'str'},
        'dns_suffix': {'key': 'properties.dnsSuffix', 'type': 'str'},
        'last_action': {'key': 'properties.lastAction', 'type': 'str'},
        'last_action_result': {'key': 'properties.lastActionResult', 'type': 'str'},
        'allowed_multi_sizes': {'key': 'properties.allowedMultiSizes', 'type': 'str'},
        'allowed_worker_sizes': {'key': 'properties.allowedWorkerSizes', 'type': 'str'},
        'maximum_number_of_machines': {'key': 'properties.maximumNumberOfMachines', 'type': 'int'},
        'vip_mappings': {'key': 'properties.vipMappings', 'type': '[VirtualIPMapping]'},
        'environment_capacities': {'key': 'properties.environmentCapacities', 'type': '[StampCapacity]'},
        'network_access_control_list': {'key': 'properties.networkAccessControlList', 'type': '[NetworkAccessControlEntry]'},
        'environment_is_healthy': {'key': 'properties.environmentIsHealthy', 'type': 'bool'},
        'environment_status': {'key': 'properties.environmentStatus', 'type': 'str'},
        'resource_group': {'key': 'properties.resourceGroup', 'type': 'str'},
        'front_end_scale_factor': {'key': 'properties.frontEndScaleFactor', 'type': 'int'},
        'default_front_end_scale_factor': {'key': 'properties.defaultFrontEndScaleFactor', 'type': 'int'},
        'api_management_account_id': {'key': 'properties.apiManagementAccountId', 'type': 'str'},
        'suspended': {'key': 'properties.suspended', 'type': 'bool'},
        'dynamic_cache_enabled': {'key': 'properties.dynamicCacheEnabled', 'type': 'bool'},
        'cluster_settings': {'key': 'properties.clusterSettings', 'type': '[NameValuePair]'},
        'user_whitelisted_ip_ranges': {'key': 'properties.userWhitelistedIpRanges', 'type': '[str]'},
        'has_linux_workers': {'key': 'properties.hasLinuxWorkers', 'type': 'bool'},
        'ssl_cert_key_vault_id': {'key': 'properties.sslCertKeyVaultId', 'type': 'str'},
        'ssl_cert_key_vault_secret_name': {'key': 'properties.sslCertKeyVaultSecretName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Populate writable fields from *kwargs*; server-owned fields start as None."""
        super(AppServiceEnvironmentResource, self).__init__(**kwargs)
        # Caller-settable properties: pulled straight from kwargs (default None).
        for _name in (
            'app_service_environment_resource_name',
            'app_service_environment_resource_location',
            'vnet_name',
            'vnet_resource_group_name',
            'vnet_subnet_name',
            'virtual_network',
            'internal_load_balancing_mode',
            'multi_size',
            'multi_role_count',
            'worker_pools',
            'ipssl_address_count',
            'dns_suffix',
            'network_access_control_list',
            'front_end_scale_factor',
            'api_management_account_id',
            'suspended',
            'dynamic_cache_enabled',
            'cluster_settings',
            'user_whitelisted_ip_ranges',
            'has_linux_workers',
            'ssl_cert_key_vault_id',
            'ssl_cert_key_vault_secret_name',
        ):
            setattr(self, _name, kwargs.get(_name))
        # Read-only properties: populated by the server and ignored in
        # requests, so they always begin life as None on the client side.
        for _name in (
            'provisioning_state',
            'status',
            'database_edition',
            'database_service_objective',
            'upgrade_domains',
            'subscription_id',
            'last_action',
            'last_action_result',
            'allowed_multi_sizes',
            'allowed_worker_sizes',
            'maximum_number_of_machines',
            'vip_mappings',
            'environment_capacities',
            'environment_is_healthy',
            'environment_status',
            'resource_group',
            'default_front_end_scale_factor',
        ):
            setattr(self, _name, None)
class AppServicePlan(Resource):
    """App Service plan.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :param location: Required. Resource Location.
    :type location: str
    :ivar type: Resource type.
    :vartype type: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param worker_tier_name: Target worker tier assigned to the App Service
     plan.
    :type worker_tier_name: str
    :ivar status: App Service plan status. Possible values include: 'Ready',
     'Pending', 'Creating'
    :vartype status: str or ~azure.mgmt.web.models.StatusOptions
    :ivar subscription: App Service plan subscription.
    :vartype subscription: str
    :param hosting_environment_profile: Specification for the App Service
     Environment to use for the App Service plan.
    :type hosting_environment_profile:
     ~azure.mgmt.web.models.HostingEnvironmentProfile
    :ivar maximum_number_of_workers: Maximum number of instances that can be
     assigned to this App Service plan.
    :vartype maximum_number_of_workers: int
    :ivar geo_region: Geographical location for the App Service plan.
    :vartype geo_region: str
    :param per_site_scaling: If <code>true</code>, apps assigned to this App
     Service plan can be scaled independently.
     If <code>false</code>, apps assigned to this App Service plan will scale
     to all instances of the plan. Default value: False .
    :type per_site_scaling: bool
    :param maximum_elastic_worker_count: Maximum number of total workers
     allowed for this ElasticScaleEnabled App Service Plan
    :type maximum_elastic_worker_count: int
    :ivar number_of_sites: Number of apps assigned to this App Service plan.
    :vartype number_of_sites: int
    :param is_spot: If <code>true</code>, this App Service Plan owns spot
     instances.
    :type is_spot: bool
    :param spot_expiration_time: The time when the server farm expires. Valid
     only if it is a spot server farm.
    :type spot_expiration_time: datetime
    :param free_offer_expiration_time: The time when the server farm free
     offer expires.
    :type free_offer_expiration_time: datetime
    :ivar resource_group: Resource group of the App Service plan.
    :vartype resource_group: str
    :param reserved: If Linux app service plan <code>true</code>,
     <code>false</code> otherwise. Default value: False .
    :type reserved: bool
    :param is_xenon: Obsolete: If Hyper-V container app service plan
     <code>true</code>, <code>false</code> otherwise. Default value: False .
    :type is_xenon: bool
    :param hyper_v: If Hyper-V container app service plan <code>true</code>,
     <code>false</code> otherwise. Default value: False .
    :type hyper_v: bool
    :param target_worker_count: Scaling worker count.
    :type target_worker_count: int
    :param target_worker_size_id: Scaling worker size ID.
    :type target_worker_size_id: int
    :ivar provisioning_state: Provisioning state of the App Service
     Environment. Possible values include: 'Succeeded', 'Failed', 'Canceled',
     'InProgress', 'Deleting'
    :vartype provisioning_state: str or
     ~azure.mgmt.web.models.ProvisioningState
    :param sku:
    :type sku: ~azure.mgmt.web.models.SkuDescription
    """

    # msrest validation metadata: 'readonly' entries are server-populated,
    # 'required' entries must be supplied before a request can be sent.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'location': {'required': True},
        'type': {'readonly': True},
        'status': {'readonly': True},
        'subscription': {'readonly': True},
        'maximum_number_of_workers': {'readonly': True},
        'geo_region': {'readonly': True},
        'number_of_sites': {'readonly': True},
        'resource_group': {'readonly': True},
        'provisioning_state': {'readonly': True},
    }

    # Maps Python attribute names onto wire-format JSON paths and msrest types.
    # Entry order also drives serialization order, so it is kept as generated.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'worker_tier_name': {'key': 'properties.workerTierName', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'StatusOptions'},
        'subscription': {'key': 'properties.subscription', 'type': 'str'},
        'hosting_environment_profile': {'key': 'properties.hostingEnvironmentProfile', 'type': 'HostingEnvironmentProfile'},
        'maximum_number_of_workers': {'key': 'properties.maximumNumberOfWorkers', 'type': 'int'},
        'geo_region': {'key': 'properties.geoRegion', 'type': 'str'},
        'per_site_scaling': {'key': 'properties.perSiteScaling', 'type': 'bool'},
        'maximum_elastic_worker_count': {'key': 'properties.maximumElasticWorkerCount', 'type': 'int'},
        'number_of_sites': {'key': 'properties.numberOfSites', 'type': 'int'},
        'is_spot': {'key': 'properties.isSpot', 'type': 'bool'},
        'spot_expiration_time': {'key': 'properties.spotExpirationTime', 'type': 'iso-8601'},
        'free_offer_expiration_time': {'key': 'properties.freeOfferExpirationTime', 'type': 'iso-8601'},
        'resource_group': {'key': 'properties.resourceGroup', 'type': 'str'},
        'reserved': {'key': 'properties.reserved', 'type': 'bool'},
        'is_xenon': {'key': 'properties.isXenon', 'type': 'bool'},
        'hyper_v': {'key': 'properties.hyperV', 'type': 'bool'},
        'target_worker_count': {'key': 'properties.targetWorkerCount', 'type': 'int'},
        'target_worker_size_id': {'key': 'properties.targetWorkerSizeId', 'type': 'int'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'},
        'sku': {'key': 'sku', 'type': 'SkuDescription'},
    }

    def __init__(self, **kwargs):
        """Populate writable fields from *kwargs*; server-owned fields start as None."""
        super(AppServicePlan, self).__init__(**kwargs)
        # Caller-settable properties: pulled straight from kwargs (default None).
        for _name in (
            'worker_tier_name',
            'hosting_environment_profile',
            'maximum_elastic_worker_count',
            'is_spot',
            'spot_expiration_time',
            'free_offer_expiration_time',
            'target_worker_count',
            'target_worker_size_id',
            'sku',
        ):
            setattr(self, _name, kwargs.get(_name))
        # Boolean flags whose documented default is False rather than None.
        for _flag in ('per_site_scaling', 'reserved', 'is_xenon', 'hyper_v'):
            setattr(self, _flag, kwargs.get(_flag, False))
        # Read-only properties: populated by the server, never sent in requests.
        for _name in (
            'status',
            'subscription',
            'maximum_number_of_workers',
            'geo_region',
            'number_of_sites',
            'resource_group',
            'provisioning_state',
        ):
            setattr(self, _name, None)
class AppServicePlanPatchResource(ProxyOnlyResource):
    """ARM resource for a app service plan.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param worker_tier_name: Target worker tier assigned to the App Service
     plan.
    :type worker_tier_name: str
    :ivar status: App Service plan status. Possible values include: 'Ready',
     'Pending', 'Creating'
    :vartype status: str or ~azure.mgmt.web.models.StatusOptions
    :ivar subscription: App Service plan subscription.
    :vartype subscription: str
    :param hosting_environment_profile: Specification for the App Service
     Environment to use for the App Service plan.
    :type hosting_environment_profile:
     ~azure.mgmt.web.models.HostingEnvironmentProfile
    :ivar maximum_number_of_workers: Maximum number of instances that can be
     assigned to this App Service plan.
    :vartype maximum_number_of_workers: int
    :ivar geo_region: Geographical location for the App Service plan.
    :vartype geo_region: str
    :param per_site_scaling: If <code>true</code>, apps assigned to this App
     Service plan can be scaled independently.
     If <code>false</code>, apps assigned to this App Service plan will scale
     to all instances of the plan. Default value: False .
    :type per_site_scaling: bool
    :param maximum_elastic_worker_count: Maximum number of total workers
     allowed for this ElasticScaleEnabled App Service Plan
    :type maximum_elastic_worker_count: int
    :ivar number_of_sites: Number of apps assigned to this App Service plan.
    :vartype number_of_sites: int
    :param is_spot: If <code>true</code>, this App Service Plan owns spot
     instances.
    :type is_spot: bool
    :param spot_expiration_time: The time when the server farm expires. Valid
     only if it is a spot server farm.
    :type spot_expiration_time: datetime
    :param free_offer_expiration_time: The time when the server farm free
     offer expires.
    :type free_offer_expiration_time: datetime
    :ivar resource_group: Resource group of the App Service plan.
    :vartype resource_group: str
    :param reserved: If Linux app service plan <code>true</code>,
     <code>false</code> otherwise. Default value: False .
    :type reserved: bool
    :param is_xenon: Obsolete: If Hyper-V container app service plan
     <code>true</code>, <code>false</code> otherwise. Default value: False .
    :type is_xenon: bool
    :param hyper_v: If Hyper-V container app service plan <code>true</code>,
     <code>false</code> otherwise. Default value: False .
    :type hyper_v: bool
    :param target_worker_count: Scaling worker count.
    :type target_worker_count: int
    :param target_worker_size_id: Scaling worker size ID.
    :type target_worker_size_id: int
    :ivar provisioning_state: Provisioning state of the App Service
     Environment. Possible values include: 'Succeeded', 'Failed', 'Canceled',
     'InProgress', 'Deleting'
    :vartype provisioning_state: str or
     ~azure.mgmt.web.models.ProvisioningState
    """

    # msrest validation metadata: 'readonly' entries are server-populated.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'status': {'readonly': True},
        'subscription': {'readonly': True},
        'maximum_number_of_workers': {'readonly': True},
        'geo_region': {'readonly': True},
        'number_of_sites': {'readonly': True},
        'resource_group': {'readonly': True},
        'provisioning_state': {'readonly': True},
    }

    # Maps Python attribute names onto wire-format JSON paths and msrest types.
    # Entry order also drives serialization order, so it is kept as generated.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'worker_tier_name': {'key': 'properties.workerTierName', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'StatusOptions'},
        'subscription': {'key': 'properties.subscription', 'type': 'str'},
        'hosting_environment_profile': {'key': 'properties.hostingEnvironmentProfile', 'type': 'HostingEnvironmentProfile'},
        'maximum_number_of_workers': {'key': 'properties.maximumNumberOfWorkers', 'type': 'int'},
        'geo_region': {'key': 'properties.geoRegion', 'type': 'str'},
        'per_site_scaling': {'key': 'properties.perSiteScaling', 'type': 'bool'},
        'maximum_elastic_worker_count': {'key': 'properties.maximumElasticWorkerCount', 'type': 'int'},
        'number_of_sites': {'key': 'properties.numberOfSites', 'type': 'int'},
        'is_spot': {'key': 'properties.isSpot', 'type': 'bool'},
        'spot_expiration_time': {'key': 'properties.spotExpirationTime', 'type': 'iso-8601'},
        'free_offer_expiration_time': {'key': 'properties.freeOfferExpirationTime', 'type': 'iso-8601'},
        'resource_group': {'key': 'properties.resourceGroup', 'type': 'str'},
        'reserved': {'key': 'properties.reserved', 'type': 'bool'},
        'is_xenon': {'key': 'properties.isXenon', 'type': 'bool'},
        'hyper_v': {'key': 'properties.hyperV', 'type': 'bool'},
        'target_worker_count': {'key': 'properties.targetWorkerCount', 'type': 'int'},
        'target_worker_size_id': {'key': 'properties.targetWorkerSizeId', 'type': 'int'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'},
    }

    def __init__(self, **kwargs):
        """Populate writable fields from *kwargs*; server-owned fields start as None."""
        super(AppServicePlanPatchResource, self).__init__(**kwargs)
        # Caller-settable properties: pulled straight from kwargs (default None).
        for _name in (
            'worker_tier_name',
            'hosting_environment_profile',
            'maximum_elastic_worker_count',
            'is_spot',
            'spot_expiration_time',
            'free_offer_expiration_time',
            'target_worker_count',
            'target_worker_size_id',
        ):
            setattr(self, _name, kwargs.get(_name))
        # Boolean flags whose documented default is False rather than None.
        for _flag in ('per_site_scaling', 'reserved', 'is_xenon', 'hyper_v'):
            setattr(self, _flag, kwargs.get(_flag, False))
        # Read-only properties: populated by the server, never sent in requests.
        for _name in (
            'status',
            'subscription',
            'maximum_number_of_workers',
            'geo_region',
            'number_of_sites',
            'resource_group',
            'provisioning_state',
        ):
            setattr(self, _name, None)
class AutoHealActions(Model):
    """Actions which to take by the auto-heal module when a rule is triggered.

    :param action_type: Predefined action to be taken. Possible values
     include: 'Recycle', 'LogEvent', 'CustomAction'
    :type action_type: str or ~azure.mgmt.web.models.AutoHealActionType
    :param custom_action: Custom action to be taken.
    :type custom_action: ~azure.mgmt.web.models.AutoHealCustomAction
    :param min_process_execution_time: Minimum time the process must execute
     before taking the action
    :type min_process_execution_time: str
    """

    # Maps Python attribute names onto wire-format keys for msrest (de)serialization.
    _attribute_map = {
        'action_type': {'key': 'actionType', 'type': 'AutoHealActionType'},
        'custom_action': {'key': 'customAction', 'type': 'AutoHealCustomAction'},
        'min_process_execution_time': {'key': 'minProcessExecutionTime', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AutoHealActions, self).__init__(**kwargs)
        # Every property is optional; absent keys default to None.
        for _name in ('action_type', 'custom_action', 'min_process_execution_time'):
            setattr(self, _name, kwargs.get(_name))
class AutoHealCustomAction(Model):
    """Custom action to be executed
    when an auto heal rule is triggered.

    :param exe: Executable to be run.
    :type exe: str
    :param parameters: Parameters for the executable.
    :type parameters: str
    """

    # Maps Python attribute names onto wire-format keys for msrest (de)serialization.
    _attribute_map = {
        'exe': {'key': 'exe', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AutoHealCustomAction, self).__init__(**kwargs)
        # Both properties are optional; absent keys default to None.
        for _name in ('exe', 'parameters'):
            setattr(self, _name, kwargs.get(_name))
class AutoHealRules(Model):
    """Rules that can be defined for auto-heal.

    :param triggers: Conditions that describe when to execute the auto-heal
     actions.
    :type triggers: ~azure.mgmt.web.models.AutoHealTriggers
    :param actions: Actions to be executed when a rule is triggered.
    :type actions: ~azure.mgmt.web.models.AutoHealActions
    """

    # Maps Python attribute names onto wire-format keys for msrest (de)serialization.
    _attribute_map = {
        'triggers': {'key': 'triggers', 'type': 'AutoHealTriggers'},
        'actions': {'key': 'actions', 'type': 'AutoHealActions'},
    }

    def __init__(self, **kwargs):
        super(AutoHealRules, self).__init__(**kwargs)
        # Both properties are optional; absent keys default to None.
        for _name in ('triggers', 'actions'):
            setattr(self, _name, kwargs.get(_name))
class AutoHealTriggers(Model):
    """Triggers for auto-heal.

    :param requests: A rule based on total requests.
    :type requests: ~azure.mgmt.web.models.RequestsBasedTrigger
    :param private_bytes_in_kb: A rule based on private bytes.
    :type private_bytes_in_kb: int
    :param status_codes: A rule based on status codes.
    :type status_codes: list[~azure.mgmt.web.models.StatusCodesBasedTrigger]
    :param slow_requests: A rule based on request execution time.
    :type slow_requests: ~azure.mgmt.web.models.SlowRequestsBasedTrigger
    """

    # Maps Python attribute names onto wire-format keys for msrest (de)serialization.
    _attribute_map = {
        'requests': {'key': 'requests', 'type': 'RequestsBasedTrigger'},
        'private_bytes_in_kb': {'key': 'privateBytesInKB', 'type': 'int'},
        'status_codes': {'key': 'statusCodes', 'type': '[StatusCodesBasedTrigger]'},
        'slow_requests': {'key': 'slowRequests', 'type': 'SlowRequestsBasedTrigger'},
    }

    def __init__(self, **kwargs):
        super(AutoHealTriggers, self).__init__(**kwargs)
        # Every trigger kind is optional; absent keys default to None.
        for _name in ('requests', 'private_bytes_in_kb', 'status_codes', 'slow_requests'):
            setattr(self, _name, kwargs.get(_name))
class AzureBlobStorageApplicationLogsConfig(Model):
    """Application logs azure blob storage configuration.

    :param level: Log level. Possible values include: 'Off', 'Verbose',
     'Information', 'Warning', 'Error'
    :type level: str or ~azure.mgmt.web.models.LogLevel
    :param sas_url: SAS url to a azure blob container with
     read/write/list/delete permissions.
    :type sas_url: str
    :param retention_in_days: Retention in days.
     Remove blobs older than X days.
     0 or lower means no retention.
    :type retention_in_days: int
    """

    # Maps Python attribute names onto wire-format keys for msrest (de)serialization.
    _attribute_map = {
        'level': {'key': 'level', 'type': 'LogLevel'},
        'sas_url': {'key': 'sasUrl', 'type': 'str'},
        'retention_in_days': {'key': 'retentionInDays', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        super(AzureBlobStorageApplicationLogsConfig, self).__init__(**kwargs)
        # Every property is optional; absent keys default to None.
        for _name in ('level', 'sas_url', 'retention_in_days'):
            setattr(self, _name, kwargs.get(_name))
class AzureBlobStorageHttpLogsConfig(Model):
    """Configuration for shipping HTTP logs to Azure Blob storage.

    :param sas_url: SAS url to a azure blob container with
     read/write/list/delete permissions.
    :type sas_url: str
    :param retention_in_days: Retention in days.
     Remove blobs older than X days.
     0 or lower means no retention.
    :type retention_in_days: int
    :param enabled: True if configuration is enabled, false if it is disabled
     and null if configuration is not set.
    :type enabled: bool
    """

    # Wire-format key/type mapping consumed by the msrest serializer.
    _attribute_map = {
        'sas_url': {'key': 'sasUrl', 'type': 'str'},
        'retention_in_days': {'key': 'retentionInDays', 'type': 'int'},
        'enabled': {'key': 'enabled', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(AzureBlobStorageHttpLogsConfig, self).__init__(**kwargs)
        # All three settings are optional; absent keys become None.
        for attr in ('sas_url', 'retention_in_days', 'enabled'):
            setattr(self, attr, kwargs.get(attr))
class AzureStorageInfoValue(Model):
    """Azure Files or Blob Storage access information value for dictionary
    storage.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param type: Type of storage. Possible values include: 'AzureFiles',
     'AzureBlob'
    :type type: str or ~azure.mgmt.web.models.AzureStorageType
    :param account_name: Name of the storage account.
    :type account_name: str
    :param share_name: Name of the file share (container name, for Blob
     storage).
    :type share_name: str
    :param access_key: Access key for the storage account.
    :type access_key: str
    :param mount_path: Path to mount the storage within the site's runtime
     environment.
    :type mount_path: str
    :ivar state: State of the storage account. Possible values include: 'Ok',
     'InvalidCredentials', 'InvalidShare'
    :vartype state: str or ~azure.mgmt.web.models.AzureStorageState
    """

    # 'state' is reported by the service and is never sent by clients.
    _validation = {
        'state': {'readonly': True},
    }

    _attribute_map = {
        'type': {'key': 'type', 'type': 'AzureStorageType'},
        'account_name': {'key': 'accountName', 'type': 'str'},
        'share_name': {'key': 'shareName', 'type': 'str'},
        'access_key': {'key': 'accessKey', 'type': 'str'},
        'mount_path': {'key': 'mountPath', 'type': 'str'},
        'state': {'key': 'state', 'type': 'AzureStorageState'},
    }

    def __init__(self, **kwargs):
        super(AzureStorageInfoValue, self).__init__(**kwargs)
        # Caller-settable fields default to None when absent.
        for attr in ('type', 'account_name', 'share_name',
                     'access_key', 'mount_path'):
            setattr(self, attr, kwargs.get(attr))
        # Read-only: populated only from server responses.
        self.state = None
class AzureStoragePropertyDictionaryResource(ProxyOnlyResource):
    """AzureStorageInfo dictionary resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param properties: Azure storage accounts.
    :type properties: dict[str, ~azure.mgmt.web.models.AzureStorageInfoValue]
    """

    # id/name/type are ARM-managed and therefore read-only.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{AzureStorageInfoValue}'},
    }

    def __init__(self, **kwargs):
        super(AzureStoragePropertyDictionaryResource, self).__init__(**kwargs)
        # Only 'properties' is settable here; the rest is handled by the base.
        self.properties = kwargs.get('properties')
class AzureTableStorageApplicationLogsConfig(Model):
    """Configuration for shipping application logs to Azure Table storage.

    All required parameters must be populated in order to send to Azure.

    :param level: Log level. Possible values include: 'Off', 'Verbose',
     'Information', 'Warning', 'Error'
    :type level: str or ~azure.mgmt.web.models.LogLevel
    :param sas_url: Required. SAS URL to an Azure table with add/query/delete
     permissions.
    :type sas_url: str
    """

    # The SAS URL is mandatory for this log sink.
    _validation = {
        'sas_url': {'required': True},
    }

    _attribute_map = {
        'level': {'key': 'level', 'type': 'LogLevel'},
        'sas_url': {'key': 'sasUrl', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(AzureTableStorageApplicationLogsConfig, self).__init__(**kwargs)
        # Both attributes simply mirror the incoming keyword arguments.
        for attr in ('level', 'sas_url'):
            setattr(self, attr, kwargs.get(attr))
class BackupItem(ProxyOnlyResource):
    """Backup description.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar backup_id: Id of the backup.
    :vartype backup_id: int
    :ivar storage_account_url: SAS URL for the storage account container which
     contains this backup.
    :vartype storage_account_url: str
    :ivar blob_name: Name of the blob which contains data for this backup.
    :vartype blob_name: str
    :ivar backup_item_name: Name of this backup.
    :vartype backup_item_name: str
    :ivar status: Backup status. Possible values include: 'InProgress',
     'Failed', 'Succeeded', 'TimedOut', 'Created', 'Skipped',
     'PartiallySucceeded', 'DeleteInProgress', 'DeleteFailed', 'Deleted'
    :vartype status: str or ~azure.mgmt.web.models.BackupItemStatus
    :ivar size_in_bytes: Size of the backup in bytes.
    :vartype size_in_bytes: long
    :ivar created: Timestamp of the backup creation.
    :vartype created: datetime
    :ivar log: Details regarding this backup. Might contain an error message.
    :vartype log: str
    :ivar databases: List of databases included in the backup.
    :vartype databases: list[~azure.mgmt.web.models.DatabaseBackupSetting]
    :ivar scheduled: True if this backup has been created due to a schedule
     being triggered.
    :vartype scheduled: bool
    :ivar last_restore_time_stamp: Timestamp of a last restore operation which
     used this backup.
    :vartype last_restore_time_stamp: datetime
    :ivar finished_time_stamp: Timestamp when this backup finished.
    :vartype finished_time_stamp: datetime
    :ivar correlation_id: Unique correlation identifier. Please use this along
     with the timestamp while communicating with Azure support.
    :vartype correlation_id: str
    :ivar website_size_in_bytes: Size of the original web app which has been
     backed up.
    :vartype website_size_in_bytes: long
    """
    # Every property of this model is server-populated: the whole payload is
    # read-only, so clients only ever receive BackupItem instances.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'backup_id': {'readonly': True},
        'storage_account_url': {'readonly': True},
        'blob_name': {'readonly': True},
        'backup_item_name': {'readonly': True},
        'status': {'readonly': True},
        'size_in_bytes': {'readonly': True},
        'created': {'readonly': True},
        'log': {'readonly': True},
        'databases': {'readonly': True},
        'scheduled': {'readonly': True},
        'last_restore_time_stamp': {'readonly': True},
        'finished_time_stamp': {'readonly': True},
        'correlation_id': {'readonly': True},
        'website_size_in_bytes': {'readonly': True},
    }
    # Wire-format mapping; 'properties.*' keys live under the ARM resource's
    # nested "properties" envelope.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'backup_id': {'key': 'properties.id', 'type': 'int'},
        'storage_account_url': {'key': 'properties.storageAccountUrl', 'type': 'str'},
        'blob_name': {'key': 'properties.blobName', 'type': 'str'},
        'backup_item_name': {'key': 'properties.name', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'BackupItemStatus'},
        'size_in_bytes': {'key': 'properties.sizeInBytes', 'type': 'long'},
        'created': {'key': 'properties.created', 'type': 'iso-8601'},
        'log': {'key': 'properties.log', 'type': 'str'},
        'databases': {'key': 'properties.databases', 'type': '[DatabaseBackupSetting]'},
        'scheduled': {'key': 'properties.scheduled', 'type': 'bool'},
        'last_restore_time_stamp': {'key': 'properties.lastRestoreTimeStamp', 'type': 'iso-8601'},
        'finished_time_stamp': {'key': 'properties.finishedTimeStamp', 'type': 'iso-8601'},
        'correlation_id': {'key': 'properties.correlationId', 'type': 'str'},
        'website_size_in_bytes': {'key': 'properties.websiteSizeInBytes', 'type': 'long'},
    }
    def __init__(self, **kwargs):
        # All fields are read-only, so they start as None and are filled in
        # by the deserializer from server responses, never from kwargs.
        super(BackupItem, self).__init__(**kwargs)
        self.backup_id = None
        self.storage_account_url = None
        self.blob_name = None
        self.backup_item_name = None
        self.status = None
        self.size_in_bytes = None
        self.created = None
        self.log = None
        self.databases = None
        self.scheduled = None
        self.last_restore_time_stamp = None
        self.finished_time_stamp = None
        self.correlation_id = None
        self.website_size_in_bytes = None
class BackupRequest(ProxyOnlyResource):
    """Description of a backup which will be performed.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param backup_name: Name of the backup.
    :type backup_name: str
    :param enabled: True if the backup schedule is enabled (must be included
     in that case), false if the backup schedule should be disabled.
    :type enabled: bool
    :param storage_account_url: Required. SAS URL to the container.
    :type storage_account_url: str
    :param backup_schedule: Schedule for the backup if it is executed
     periodically.
    :type backup_schedule: ~azure.mgmt.web.models.BackupSchedule
    :param databases: Databases included in the backup.
    :type databases: list[~azure.mgmt.web.models.DatabaseBackupSetting]
    """

    # Destination container URL is the only mandatory client-supplied field.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'storage_account_url': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'backup_name': {'key': 'properties.backupName', 'type': 'str'},
        'enabled': {'key': 'properties.enabled', 'type': 'bool'},
        'storage_account_url': {'key': 'properties.storageAccountUrl', 'type': 'str'},
        'backup_schedule': {'key': 'properties.backupSchedule', 'type': 'BackupSchedule'},
        'databases': {'key': 'properties.databases', 'type': '[DatabaseBackupSetting]'},
    }

    def __init__(self, **kwargs):
        super(BackupRequest, self).__init__(**kwargs)
        # Mirror keyword arguments onto the instance; missing keys become None
        # (required-ness of storage_account_url is enforced at send time).
        for attr in ('backup_name', 'enabled', 'storage_account_url',
                     'backup_schedule', 'databases'):
            setattr(self, attr, kwargs.get(attr))
class BackupSchedule(Model):
    """Description of a backup schedule. Describes how often should be the backup
    performed and what should be the retention policy.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param frequency_interval: Required. How often the backup should be
     executed (e.g. for weekly backup, this should be set to 7 and
     FrequencyUnit should be set to Day). Default value: 7 .
    :type frequency_interval: int
    :param frequency_unit: Required. The unit of time for how often the backup
     should be executed (e.g. for weekly backup, this should be set to Day and
     FrequencyInterval should be set to 7). Possible values include: 'Day',
     'Hour'. Default value: "Day" .
    :type frequency_unit: str or ~azure.mgmt.web.models.FrequencyUnit
    :param keep_at_least_one_backup: Required. True if the retention policy
     should always keep at least one backup in the storage account, regardless
     how old it is; false otherwise. Default value: True .
    :type keep_at_least_one_backup: bool
    :param retention_period_in_days: Required. After how many days backups
     should be deleted. Default value: 30 .
    :type retention_period_in_days: int
    :param start_time: When the schedule should start working.
    :type start_time: datetime
    :ivar last_execution_time: Last time when this schedule was triggered.
    :vartype last_execution_time: datetime
    """

    _validation = {
        'frequency_interval': {'required': True},
        'frequency_unit': {'required': True},
        'keep_at_least_one_backup': {'required': True},
        'retention_period_in_days': {'required': True},
        'last_execution_time': {'readonly': True},
    }

    _attribute_map = {
        'frequency_interval': {'key': 'frequencyInterval', 'type': 'int'},
        'frequency_unit': {'key': 'frequencyUnit', 'type': 'FrequencyUnit'},
        'keep_at_least_one_backup': {'key': 'keepAtLeastOneBackup', 'type': 'bool'},
        'retention_period_in_days': {'key': 'retentionPeriodInDays', 'type': 'int'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'last_execution_time': {'key': 'lastExecutionTime', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        super(BackupSchedule, self).__init__(**kwargs)
        # Required fields carry service-defined defaults (weekly, kept 30
        # days, always retaining at least one backup).
        defaults = {
            'frequency_interval': 7,
            'frequency_unit': "Day",
            'keep_at_least_one_backup': True,
            'retention_period_in_days': 30,
            'start_time': None,
        }
        for attr, fallback in defaults.items():
            setattr(self, attr, kwargs.get(attr, fallback))
        # Read-only; only ever populated by the deserializer.
        self.last_execution_time = None
class BillingMeter(ProxyOnlyResource):
    """App Service billing entity that contains information about meter which the
    Azure billing system utilizes to charge users for services.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param meter_id: Meter GUID onboarded in Commerce
    :type meter_id: str
    :param billing_location: Azure Location of billable resource
    :type billing_location: str
    :param short_name: Short Name from App Service Azure pricing Page
    :type short_name: str
    :param friendly_name: Friendly name of the meter
    :type friendly_name: str
    :param resource_type: App Service ResourceType meter used for
    :type resource_type: str
    :param os_type: App Service OS type meter used for
    :type os_type: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'meter_id': {'key': 'properties.meterId', 'type': 'str'},
        'billing_location': {'key': 'properties.billingLocation', 'type': 'str'},
        'short_name': {'key': 'properties.shortName', 'type': 'str'},
        'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
        'resource_type': {'key': 'properties.resourceType', 'type': 'str'},
        'os_type': {'key': 'properties.osType', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(BillingMeter, self).__init__(**kwargs)
        # All meter metadata is optional; absent keys default to None.
        for attr in ('meter_id', 'billing_location', 'short_name',
                     'friendly_name', 'resource_type', 'os_type'):
            setattr(self, attr, kwargs.get(attr))
class Capability(Model):
    """Describes the capabilities/features allowed for a specific SKU.

    :param name: Name of the SKU capability.
    :type name: str
    :param value: Value of the SKU capability.
    :type value: str
    :param reason: Reason of the SKU capability.
    :type reason: str
    """

    # Plain name/value/reason triple; all serialized at the top level.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
        'reason': {'key': 'reason', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(Capability, self).__init__(**kwargs)
        for attr in ('name', 'value', 'reason'):
            setattr(self, attr, kwargs.get(attr))
class Certificate(Resource):
    """SSL certificate for an app.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :param location: Required. Resource Location.
    :type location: str
    :ivar type: Resource type.
    :vartype type: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :ivar friendly_name: Friendly name of the certificate.
    :vartype friendly_name: str
    :ivar subject_name: Subject name of the certificate.
    :vartype subject_name: str
    :param host_names: Host names the certificate applies to.
    :type host_names: list[str]
    :param pfx_blob: Pfx blob.
    :type pfx_blob: bytearray
    :ivar site_name: App name.
    :vartype site_name: str
    :ivar self_link: Self link.
    :vartype self_link: str
    :ivar issuer: Certificate issuer.
    :vartype issuer: str
    :ivar issue_date: Certificate issue Date.
    :vartype issue_date: datetime
    :ivar expiration_date: Certificate expiration date.
    :vartype expiration_date: datetime
    :param password: Required. Certificate password.
    :type password: str
    :ivar thumbprint: Certificate thumbprint.
    :vartype thumbprint: str
    :ivar valid: Is the certificate valid?.
    :vartype valid: bool
    :ivar cer_blob: Raw bytes of .cer file
    :vartype cer_blob: bytearray
    :ivar public_key_hash: Public key hash.
    :vartype public_key_hash: str
    :ivar hosting_environment_profile: Specification for the App Service
     Environment to use for the certificate.
    :vartype hosting_environment_profile:
     ~azure.mgmt.web.models.HostingEnvironmentProfile
    :param key_vault_id: Key Vault Csm resource Id.
    :type key_vault_id: str
    :param key_vault_secret_name: Key Vault secret name.
    :type key_vault_secret_name: str
    :ivar key_vault_secret_status: Status of the Key Vault secret. Possible
     values include: 'Initialized', 'WaitingOnCertificateOrder', 'Succeeded',
     'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault',
     'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist',
     'KeyVaultSecretDoesNotExist', 'UnknownError', 'ExternalPrivateKey',
     'Unknown'
    :vartype key_vault_secret_status: str or
     ~azure.mgmt.web.models.KeyVaultSecretStatus
    :param server_farm_id: Resource ID of the associated App Service plan,
     formatted as:
     "/subscriptions/{subscriptionID}/resourceGroups/{groupName}/providers/Microsoft.Web/serverfarms/{appServicePlanName}".
    :type server_farm_id: str
    """
    # 'location' and 'password' are the only required client inputs; the many
    # readonly entries are derived by the service from the uploaded PFX.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'location': {'required': True},
        'type': {'readonly': True},
        'friendly_name': {'readonly': True},
        'subject_name': {'readonly': True},
        'site_name': {'readonly': True},
        'self_link': {'readonly': True},
        'issuer': {'readonly': True},
        'issue_date': {'readonly': True},
        'expiration_date': {'readonly': True},
        'password': {'required': True},
        'thumbprint': {'readonly': True},
        'valid': {'readonly': True},
        'cer_blob': {'readonly': True},
        'public_key_hash': {'readonly': True},
        'hosting_environment_profile': {'readonly': True},
        'key_vault_secret_status': {'readonly': True},
    }
    # 'properties.*' keys sit inside the ARM resource's nested "properties"
    # envelope on the wire.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
        'subject_name': {'key': 'properties.subjectName', 'type': 'str'},
        'host_names': {'key': 'properties.hostNames', 'type': '[str]'},
        'pfx_blob': {'key': 'properties.pfxBlob', 'type': 'bytearray'},
        'site_name': {'key': 'properties.siteName', 'type': 'str'},
        'self_link': {'key': 'properties.selfLink', 'type': 'str'},
        'issuer': {'key': 'properties.issuer', 'type': 'str'},
        'issue_date': {'key': 'properties.issueDate', 'type': 'iso-8601'},
        'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'},
        'password': {'key': 'properties.password', 'type': 'str'},
        'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
        'valid': {'key': 'properties.valid', 'type': 'bool'},
        'cer_blob': {'key': 'properties.cerBlob', 'type': 'bytearray'},
        'public_key_hash': {'key': 'properties.publicKeyHash', 'type': 'str'},
        'hosting_environment_profile': {'key': 'properties.hostingEnvironmentProfile', 'type': 'HostingEnvironmentProfile'},
        'key_vault_id': {'key': 'properties.keyVaultId', 'type': 'str'},
        'key_vault_secret_name': {'key': 'properties.keyVaultSecretName', 'type': 'str'},
        'key_vault_secret_status': {'key': 'properties.keyVaultSecretStatus', 'type': 'KeyVaultSecretStatus'},
        'server_farm_id': {'key': 'properties.serverFarmId', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        # Settable fields come from kwargs (default None); read-only fields
        # are initialized to None and filled in by the deserializer.
        super(Certificate, self).__init__(**kwargs)
        self.friendly_name = None
        self.subject_name = None
        self.host_names = kwargs.get('host_names', None)
        self.pfx_blob = kwargs.get('pfx_blob', None)
        self.site_name = None
        self.self_link = None
        self.issuer = None
        self.issue_date = None
        self.expiration_date = None
        self.password = kwargs.get('password', None)
        self.thumbprint = None
        self.valid = None
        self.cer_blob = None
        self.public_key_hash = None
        self.hosting_environment_profile = None
        self.key_vault_id = kwargs.get('key_vault_id', None)
        self.key_vault_secret_name = kwargs.get('key_vault_secret_name', None)
        self.key_vault_secret_status = None
        self.server_farm_id = kwargs.get('server_farm_id', None)
class CertificateDetails(Model):
    """SSL certificate details.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar version: Certificate Version.
    :vartype version: int
    :ivar serial_number: Certificate Serial Number.
    :vartype serial_number: str
    :ivar thumbprint: Certificate Thumbprint.
    :vartype thumbprint: str
    :ivar subject: Certificate Subject.
    :vartype subject: str
    :ivar not_before: Date Certificate is valid from.
    :vartype not_before: datetime
    :ivar not_after: Date Certificate is valid to.
    :vartype not_after: datetime
    :ivar signature_algorithm: Certificate Signature algorithm.
    :vartype signature_algorithm: str
    :ivar issuer: Certificate Issuer.
    :vartype issuer: str
    :ivar raw_data: Raw certificate data.
    :vartype raw_data: str
    """

    # Entirely server-populated: every field is read-only.
    _validation = {
        'version': {'readonly': True},
        'serial_number': {'readonly': True},
        'thumbprint': {'readonly': True},
        'subject': {'readonly': True},
        'not_before': {'readonly': True},
        'not_after': {'readonly': True},
        'signature_algorithm': {'readonly': True},
        'issuer': {'readonly': True},
        'raw_data': {'readonly': True},
    }

    _attribute_map = {
        'version': {'key': 'version', 'type': 'int'},
        'serial_number': {'key': 'serialNumber', 'type': 'str'},
        'thumbprint': {'key': 'thumbprint', 'type': 'str'},
        'subject': {'key': 'subject', 'type': 'str'},
        'not_before': {'key': 'notBefore', 'type': 'iso-8601'},
        'not_after': {'key': 'notAfter', 'type': 'iso-8601'},
        'signature_algorithm': {'key': 'signatureAlgorithm', 'type': 'str'},
        'issuer': {'key': 'issuer', 'type': 'str'},
        'raw_data': {'key': 'rawData', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(CertificateDetails, self).__init__(**kwargs)
        # Start every read-only field unset; the deserializer fills them in.
        for attr in ('version', 'serial_number', 'thumbprint', 'subject',
                     'not_before', 'not_after', 'signature_algorithm',
                     'issuer', 'raw_data'):
            setattr(self, attr, None)
class CertificateEmail(ProxyOnlyResource):
    """SSL certificate email.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param email_id: Email id.
    :type email_id: str
    :param time_stamp: Time stamp.
    :type time_stamp: datetime
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'email_id': {'key': 'properties.emailId', 'type': 'str'},
        'time_stamp': {'key': 'properties.timeStamp', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        super(CertificateEmail, self).__init__(**kwargs)
        # Both optional; default to None when not supplied.
        for attr in ('email_id', 'time_stamp'):
            setattr(self, attr, kwargs.get(attr))
class CertificateOrderAction(ProxyOnlyResource):
    """Certificate order action.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar action_type: Action type. Possible values include:
     'CertificateIssued', 'CertificateOrderCanceled',
     'CertificateOrderCreated', 'CertificateRevoked',
     'DomainValidationComplete', 'FraudDetected', 'OrgNameChange',
     'OrgValidationComplete', 'SanDrop', 'FraudCleared', 'CertificateExpired',
     'CertificateExpirationWarning', 'FraudDocumentationRequired', 'Unknown'
    :vartype action_type: str or
     ~azure.mgmt.web.models.CertificateOrderActionType
    :ivar created_at: Time at which the certificate action was performed.
    :vartype created_at: datetime
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'action_type': {'readonly': True},
        'created_at': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'action_type': {'key': 'properties.actionType', 'type': 'CertificateOrderActionType'},
        'created_at': {'key': 'properties.createdAt', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        super(CertificateOrderAction, self).__init__(**kwargs)
        # Both fields are read-only and populated only from responses.
        self.action_type = None
        self.created_at = None
class CertificatePatchResource(ProxyOnlyResource):
    """ARM resource for a certificate.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar friendly_name: Friendly name of the certificate.
    :vartype friendly_name: str
    :ivar subject_name: Subject name of the certificate.
    :vartype subject_name: str
    :param host_names: Host names the certificate applies to.
    :type host_names: list[str]
    :param pfx_blob: Pfx blob.
    :type pfx_blob: bytearray
    :ivar site_name: App name.
    :vartype site_name: str
    :ivar self_link: Self link.
    :vartype self_link: str
    :ivar issuer: Certificate issuer.
    :vartype issuer: str
    :ivar issue_date: Certificate issue Date.
    :vartype issue_date: datetime
    :ivar expiration_date: Certificate expiration date.
    :vartype expiration_date: datetime
    :param password: Required. Certificate password.
    :type password: str
    :ivar thumbprint: Certificate thumbprint.
    :vartype thumbprint: str
    :ivar valid: Is the certificate valid?.
    :vartype valid: bool
    :ivar cer_blob: Raw bytes of .cer file
    :vartype cer_blob: bytearray
    :ivar public_key_hash: Public key hash.
    :vartype public_key_hash: str
    :ivar hosting_environment_profile: Specification for the App Service
     Environment to use for the certificate.
    :vartype hosting_environment_profile:
     ~azure.mgmt.web.models.HostingEnvironmentProfile
    :param key_vault_id: Key Vault Csm resource Id.
    :type key_vault_id: str
    :param key_vault_secret_name: Key Vault secret name.
    :type key_vault_secret_name: str
    :ivar key_vault_secret_status: Status of the Key Vault secret. Possible
     values include: 'Initialized', 'WaitingOnCertificateOrder', 'Succeeded',
     'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault',
     'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist',
     'KeyVaultSecretDoesNotExist', 'UnknownError', 'ExternalPrivateKey',
     'Unknown'
    :vartype key_vault_secret_status: str or
     ~azure.mgmt.web.models.KeyVaultSecretStatus
    :param server_farm_id: Resource ID of the associated App Service plan,
     formatted as:
     "/subscriptions/{subscriptionID}/resourceGroups/{groupName}/providers/Microsoft.Web/serverfarms/{appServicePlanName}".
    :type server_farm_id: str
    """
    # PATCH variant of Certificate: same shape, but without location/tags
    # since those cannot change on update. 'password' remains required.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'friendly_name': {'readonly': True},
        'subject_name': {'readonly': True},
        'site_name': {'readonly': True},
        'self_link': {'readonly': True},
        'issuer': {'readonly': True},
        'issue_date': {'readonly': True},
        'expiration_date': {'readonly': True},
        'password': {'required': True},
        'thumbprint': {'readonly': True},
        'valid': {'readonly': True},
        'cer_blob': {'readonly': True},
        'public_key_hash': {'readonly': True},
        'hosting_environment_profile': {'readonly': True},
        'key_vault_secret_status': {'readonly': True},
    }
    # 'properties.*' keys sit inside the ARM resource's nested "properties"
    # envelope on the wire.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'},
        'subject_name': {'key': 'properties.subjectName', 'type': 'str'},
        'host_names': {'key': 'properties.hostNames', 'type': '[str]'},
        'pfx_blob': {'key': 'properties.pfxBlob', 'type': 'bytearray'},
        'site_name': {'key': 'properties.siteName', 'type': 'str'},
        'self_link': {'key': 'properties.selfLink', 'type': 'str'},
        'issuer': {'key': 'properties.issuer', 'type': 'str'},
        'issue_date': {'key': 'properties.issueDate', 'type': 'iso-8601'},
        'expiration_date': {'key': 'properties.expirationDate', 'type': 'iso-8601'},
        'password': {'key': 'properties.password', 'type': 'str'},
        'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
        'valid': {'key': 'properties.valid', 'type': 'bool'},
        'cer_blob': {'key': 'properties.cerBlob', 'type': 'bytearray'},
        'public_key_hash': {'key': 'properties.publicKeyHash', 'type': 'str'},
        'hosting_environment_profile': {'key': 'properties.hostingEnvironmentProfile', 'type': 'HostingEnvironmentProfile'},
        'key_vault_id': {'key': 'properties.keyVaultId', 'type': 'str'},
        'key_vault_secret_name': {'key': 'properties.keyVaultSecretName', 'type': 'str'},
        'key_vault_secret_status': {'key': 'properties.keyVaultSecretStatus', 'type': 'KeyVaultSecretStatus'},
        'server_farm_id': {'key': 'properties.serverFarmId', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        # Settable fields come from kwargs (default None); read-only fields
        # are initialized to None and filled in by the deserializer.
        super(CertificatePatchResource, self).__init__(**kwargs)
        self.friendly_name = None
        self.subject_name = None
        self.host_names = kwargs.get('host_names', None)
        self.pfx_blob = kwargs.get('pfx_blob', None)
        self.site_name = None
        self.self_link = None
        self.issuer = None
        self.issue_date = None
        self.expiration_date = None
        self.password = kwargs.get('password', None)
        self.thumbprint = None
        self.valid = None
        self.cer_blob = None
        self.public_key_hash = None
        self.hosting_environment_profile = None
        self.key_vault_id = kwargs.get('key_vault_id', None)
        self.key_vault_secret_name = kwargs.get('key_vault_secret_name', None)
        self.key_vault_secret_status = None
        self.server_farm_id = kwargs.get('server_farm_id', None)
class CloningInfo(Model):
    """Parameters describing an app cloning operation.

    All required parameters must be populated in order to send to Azure.

    :param correlation_id: Correlation ID tying multiple cloning operations
     together so that they use the same snapshot.
    :type correlation_id: str
    :param overwrite: <code>true</code> to overwrite the destination app;
     otherwise, <code>false</code>.
    :type overwrite: bool
    :param clone_custom_host_names: <code>true</code> to clone custom
     hostnames from the source app; otherwise, <code>false</code>.
    :type clone_custom_host_names: bool
    :param clone_source_control: <code>true</code> to clone source control
     from the source app; otherwise, <code>false</code>.
    :type clone_source_control: bool
    :param source_web_app_id: Required. ARM resource ID of the source app, of
     the form
     /subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}
     for production slots, or
     /subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slotName}
     for other slots.
    :type source_web_app_id: str
    :param source_web_app_location: Location of the source app, e.g.
     West US or North Europe.
    :type source_web_app_location: str
    :param hosting_environment: App Service Environment.
    :type hosting_environment: str
    :param app_settings_overrides: Application setting overrides for the
     cloned app. Settings not listed here are cloned from the source app.
    :type app_settings_overrides: dict[str, str]
    :param configure_load_balancing: <code>true</code> to configure load
     balancing for source and destination app.
    :type configure_load_balancing: bool
    :param traffic_manager_profile_id: ARM resource ID of an existing Traffic
     Manager profile to use, of the form
     /subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficManagerProfiles/{profileName}.
    :type traffic_manager_profile_id: str
    :param traffic_manager_profile_name: Name of the Traffic Manager profile
     to create; only needed when the profile does not already exist.
    :type traffic_manager_profile_name: str
    """

    _validation = {
        'source_web_app_id': {'required': True},
    }

    _attribute_map = {
        'correlation_id': {'key': 'correlationId', 'type': 'str'},
        'overwrite': {'key': 'overwrite', 'type': 'bool'},
        'clone_custom_host_names': {'key': 'cloneCustomHostNames', 'type': 'bool'},
        'clone_source_control': {'key': 'cloneSourceControl', 'type': 'bool'},
        'source_web_app_id': {'key': 'sourceWebAppId', 'type': 'str'},
        'source_web_app_location': {'key': 'sourceWebAppLocation', 'type': 'str'},
        'hosting_environment': {'key': 'hostingEnvironment', 'type': 'str'},
        'app_settings_overrides': {'key': 'appSettingsOverrides', 'type': '{str}'},
        'configure_load_balancing': {'key': 'configureLoadBalancing', 'type': 'bool'},
        'traffic_manager_profile_id': {'key': 'trafficManagerProfileId', 'type': 'str'},
        'traffic_manager_profile_name': {'key': 'trafficManagerProfileName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(CloningInfo, self).__init__(**kwargs)
        # Every attribute is optional at construction time; a missing keyword
        # argument leaves the attribute as None.
        for _name in ('correlation_id', 'overwrite', 'clone_custom_host_names',
                      'clone_source_control', 'source_web_app_id',
                      'source_web_app_location', 'hosting_environment',
                      'app_settings_overrides', 'configure_load_balancing',
                      'traffic_manager_profile_id',
                      'traffic_manager_profile_name'):
            setattr(self, _name, kwargs.get(_name, None))
class CloudError(Model):
    """Generic cloud error envelope with no declared attributes."""

    # No serializable attributes are declared for this model.
    _attribute_map = {}
class ConnectionStringDictionary(ProxyOnlyResource):
    """Proxy-only resource wrapping a dictionary of connection strings.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param properties: Connection strings.
    :type properties: dict[str,
     ~azure.mgmt.web.models.ConnStringValueTypePair]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{ConnStringValueTypePair}'},
    }

    def __init__(self, **kwargs):
        super(ConnectionStringDictionary, self).__init__(**kwargs)
        # 'properties' is the only settable field on this proxy resource.
        self.properties = kwargs.get('properties')
class ConnStringInfo(Model):
    """Database connection string information.

    :param name: Name of connection string.
    :type name: str
    :param connection_string: Connection string value.
    :type connection_string: str
    :param type: Type of database. Possible values include: 'MySql',
     'SQLServer', 'SQLAzure', 'Custom', 'NotificationHub', 'ServiceBus',
     'EventHub', 'ApiHub', 'DocDb', 'RedisCache', 'PostgreSQL'
    :type type: str or ~azure.mgmt.web.models.ConnectionStringType
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'connection_string': {'key': 'connectionString', 'type': 'str'},
        'type': {'key': 'type', 'type': 'ConnectionStringType'},
    }

    def __init__(self, **kwargs):
        super(ConnStringInfo, self).__init__(**kwargs)
        # All attributes are optional and default to None.
        for _name in ('name', 'connection_string', 'type'):
            setattr(self, _name, kwargs.get(_name, None))
class ConnStringValueTypePair(Model):
    """A connection-string value paired with its database type.

    All required parameters must be populated in order to send to Azure.

    :param value: Required. Value of pair.
    :type value: str
    :param type: Required. Type of database. Possible values include:
     'MySql', 'SQLServer', 'SQLAzure', 'Custom', 'NotificationHub',
     'ServiceBus', 'EventHub', 'ApiHub', 'DocDb', 'RedisCache', 'PostgreSQL'
    :type type: str or ~azure.mgmt.web.models.ConnectionStringType
    """

    _validation = {
        'value': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': 'str'},
        'type': {'key': 'type', 'type': 'ConnectionStringType'},
    }

    def __init__(self, **kwargs):
        super(ConnStringValueTypePair, self).__init__(**kwargs)
        # Both fields are required by _validation but may still be supplied
        # later; construction itself does not enforce them.
        self.value = kwargs.get('value')
        self.type = kwargs.get('type')
class Contact(Model):
    """Contact information for domain registration.

    If the 'Domain Privacy' option is not selected, this information is made
    publicly available through the Whois directories as per ICANN
    requirements.

    All required parameters must be populated in order to send to Azure.

    :param address_mailing: Mailing address.
    :type address_mailing: ~azure.mgmt.web.models.Address
    :param email: Required. Email address.
    :type email: str
    :param fax: Fax number.
    :type fax: str
    :param job_title: Job title.
    :type job_title: str
    :param name_first: Required. First name.
    :type name_first: str
    :param name_last: Required. Last name.
    :type name_last: str
    :param name_middle: Middle name.
    :type name_middle: str
    :param organization: Organization contact belongs to.
    :type organization: str
    :param phone: Required. Phone number.
    :type phone: str
    """

    _validation = {
        'email': {'required': True},
        'name_first': {'required': True},
        'name_last': {'required': True},
        'phone': {'required': True},
    }

    _attribute_map = {
        'address_mailing': {'key': 'addressMailing', 'type': 'Address'},
        'email': {'key': 'email', 'type': 'str'},
        'fax': {'key': 'fax', 'type': 'str'},
        'job_title': {'key': 'jobTitle', 'type': 'str'},
        'name_first': {'key': 'nameFirst', 'type': 'str'},
        'name_last': {'key': 'nameLast', 'type': 'str'},
        'name_middle': {'key': 'nameMiddle', 'type': 'str'},
        'organization': {'key': 'organization', 'type': 'str'},
        'phone': {'key': 'phone', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(Contact, self).__init__(**kwargs)
        # Copy each known keyword argument onto the instance, defaulting to
        # None; required-ness is enforced by _validation, not here.
        for _name in ('address_mailing', 'email', 'fax', 'job_title',
                      'name_first', 'name_last', 'name_middle',
                      'organization', 'phone'):
            setattr(self, _name, kwargs.get(_name, None))
class ContinuousWebJob(ProxyOnlyResource):
    """Information about a continuous WebJob.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param status: Job status. Possible values include: 'Initializing',
     'Starting', 'Running', 'PendingRestart', 'Stopped'
    :type status: str or ~azure.mgmt.web.models.ContinuousWebJobStatus
    :param detailed_status: Detailed status.
    :type detailed_status: str
    :param log_url: Log URL.
    :type log_url: str
    :param run_command: Run command.
    :type run_command: str
    :param url: Job URL.
    :type url: str
    :param extra_info_url: Extra Info URL.
    :type extra_info_url: str
    :param web_job_type: Job type. Possible values include: 'Continuous',
     'Triggered'
    :type web_job_type: str or ~azure.mgmt.web.models.WebJobType
    :param error: Error information.
    :type error: str
    :param using_sdk: Using SDK?
    :type using_sdk: bool
    :param settings: Job settings.
    :type settings: dict[str, object]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'ContinuousWebJobStatus'},
        'detailed_status': {'key': 'properties.detailed_status', 'type': 'str'},
        'log_url': {'key': 'properties.log_url', 'type': 'str'},
        'run_command': {'key': 'properties.run_command', 'type': 'str'},
        'url': {'key': 'properties.url', 'type': 'str'},
        'extra_info_url': {'key': 'properties.extra_info_url', 'type': 'str'},
        'web_job_type': {'key': 'properties.web_job_type', 'type': 'WebJobType'},
        'error': {'key': 'properties.error', 'type': 'str'},
        'using_sdk': {'key': 'properties.using_sdk', 'type': 'bool'},
        'settings': {'key': 'properties.settings', 'type': '{object}'},
    }

    def __init__(self, **kwargs):
        super(ContinuousWebJob, self).__init__(**kwargs)
        # All job-specific attributes are optional keyword arguments.
        for _name in ('status', 'detailed_status', 'log_url', 'run_command',
                      'url', 'extra_info_url', 'web_job_type', 'error',
                      'using_sdk', 'settings'):
            setattr(self, _name, kwargs.get(_name, None))
class CorsSettings(Model):
    """Cross-Origin Resource Sharing (CORS) settings for the app.

    :param allowed_origins: Gets or sets the list of origins that should be
     allowed to make cross-origin calls (for example:
     http://example.com:12345). Use "*" to allow all.
    :type allowed_origins: list[str]
    :param support_credentials: Gets or sets whether CORS requests with
     credentials are allowed. See
     https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS#Requests_with_credentials
     for more details.
    :type support_credentials: bool
    """

    _attribute_map = {
        'allowed_origins': {'key': 'allowedOrigins', 'type': '[str]'},
        'support_credentials': {'key': 'supportCredentials', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(CorsSettings, self).__init__(**kwargs)
        # Both settings are optional and default to None when not supplied.
        self.allowed_origins = kwargs.get('allowed_origins')
        self.support_credentials = kwargs.get('support_credentials')
class CsmMoveResourceEnvelope(Model):
    """Object with a list of the resources that need to be moved and the resource
    group they should be moved to.

    :param target_resource_group: Name of the destination resource group
     (1-90 characters; must not end with a period).
    :type target_resource_group: str
    :param resources: ARM resource IDs of the resources to move.
    :type resources: list[str]
    """

    _validation = {
        # FIX: the generated pattern began with a stray space (r' ^...').
        # Because '^' only matches at the start of the string (no MULTILINE
        # flag is applied by the validator), a pattern of "space then ^" can
        # never match anything, so client-side validation of
        # target_resource_group always failed. The anchor must come first.
        'target_resource_group': {'max_length': 90, 'min_length': 1, 'pattern': r'^[-\w\._\(\)]+[^\.]$'},
    }

    _attribute_map = {
        'target_resource_group': {'key': 'targetResourceGroup', 'type': 'str'},
        'resources': {'key': 'resources', 'type': '[str]'},
    }

    def __init__(self, **kwargs):
        super(CsmMoveResourceEnvelope, self).__init__(**kwargs)
        self.target_resource_group = kwargs.get('target_resource_group', None)
        self.resources = kwargs.get('resources', None)
class CsmOperationDescription(Model):
    """Description of an operation available for the Microsoft.Web resource
    provider.

    :param name: Operation name.
    :type name: str
    :param display: Display metadata for the operation.
    :type display: ~azure.mgmt.web.models.CsmOperationDisplay
    :param origin: Origin of the operation.
    :type origin: str
    :param properties: Additional operation properties.
    :type properties: ~azure.mgmt.web.models.CsmOperationDescriptionProperties
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display': {'key': 'display', 'type': 'CsmOperationDisplay'},
        'origin': {'key': 'origin', 'type': 'str'},
        'properties': {'key': 'properties', 'type': 'CsmOperationDescriptionProperties'},
    }

    def __init__(self, **kwargs):
        super(CsmOperationDescription, self).__init__(**kwargs)
        # Every attribute is an optional keyword argument.
        for _name in ('name', 'display', 'origin', 'properties'):
            setattr(self, _name, kwargs.get(_name, None))
class CsmOperationDescriptionProperties(Model):
    """Properties available for a Microsoft.Web resource provider operation.

    :param service_specification: Service specification for the operation.
    :type service_specification: ~azure.mgmt.web.models.ServiceSpecification
    """

    _attribute_map = {
        'service_specification': {'key': 'serviceSpecification', 'type': 'ServiceSpecification'},
    }

    def __init__(self, **kwargs):
        super(CsmOperationDescriptionProperties, self).__init__(**kwargs)
        # Single optional attribute; defaults to None.
        self.service_specification = kwargs.get('service_specification')
class CsmOperationDisplay(Model):
    """Metadata about an operation, used for display in the portal.

    :param provider: Provider name.
    :type provider: str
    :param resource: Resource name.
    :type resource: str
    :param operation: Operation name.
    :type operation: str
    :param description: Operation description.
    :type description: str
    """

    _attribute_map = {
        'provider': {'key': 'provider', 'type': 'str'},
        'resource': {'key': 'resource', 'type': 'str'},
        'operation': {'key': 'operation', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(CsmOperationDisplay, self).__init__(**kwargs)
        # Every attribute is an optional keyword argument.
        for _name in ('provider', 'resource', 'operation', 'description'):
            setattr(self, _name, kwargs.get(_name, None))
class CsmPublishingProfileOptions(Model):
    """Publishing options for a requested publishing profile.

    :param format: Name of the format. Valid values are:
     FileZilla3,
     WebDeploy -- default,
     Ftp. Possible values include: 'FileZilla3', 'WebDeploy', 'Ftp'
    :type format: str or ~azure.mgmt.web.models.PublishingProfileFormat
    :param include_disaster_recovery_endpoints: Include the DisasterRecover
     endpoint if true.
    :type include_disaster_recovery_endpoints: bool
    """

    _attribute_map = {
        'format': {'key': 'format', 'type': 'str'},
        'include_disaster_recovery_endpoints': {'key': 'includeDisasterRecoveryEndpoints', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(CsmPublishingProfileOptions, self).__init__(**kwargs)
        # Both options are optional keyword arguments.
        self.format = kwargs.get('format')
        self.include_disaster_recovery_endpoints = kwargs.get(
            'include_disaster_recovery_endpoints')
class CsmSlotEntity(Model):
    """Deployment slot parameters for a swap operation.

    All required parameters must be populated in order to send to Azure.

    :param target_slot: Required. Destination deployment slot during swap
     operation.
    :type target_slot: str
    :param preserve_vnet: Required. <code>true</code> to preserve Virtual
     Network to the slot during swap; otherwise, <code>false</code>.
    :type preserve_vnet: bool
    """

    _validation = {
        'target_slot': {'required': True},
        'preserve_vnet': {'required': True},
    }

    _attribute_map = {
        'target_slot': {'key': 'targetSlot', 'type': 'str'},
        'preserve_vnet': {'key': 'preserveVnet', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(CsmSlotEntity, self).__init__(**kwargs)
        # Required-ness is enforced by _validation, not at construction.
        self.target_slot = kwargs.get('target_slot')
        self.preserve_vnet = kwargs.get('preserve_vnet')
class CsmUsageQuota(Model):
    """Usage of the quota resource.

    :param unit: Units of measurement for the quota resource.
    :type unit: str
    :param next_reset_time: Next reset time for the resource counter.
    :type next_reset_time: datetime
    :param current_value: The current value of the resource counter.
    :type current_value: long
    :param limit: The resource limit.
    :type limit: long
    :param name: Quota name.
    :type name: ~azure.mgmt.web.models.LocalizableString
    """

    _attribute_map = {
        'unit': {'key': 'unit', 'type': 'str'},
        'next_reset_time': {'key': 'nextResetTime', 'type': 'iso-8601'},
        'current_value': {'key': 'currentValue', 'type': 'long'},
        'limit': {'key': 'limit', 'type': 'long'},
        'name': {'key': 'name', 'type': 'LocalizableString'},
    }

    def __init__(self, **kwargs):
        super(CsmUsageQuota, self).__init__(**kwargs)
        # Every attribute is an optional keyword argument.
        for _name in ('unit', 'next_reset_time', 'current_value', 'limit',
                      'name'):
            setattr(self, _name, kwargs.get(_name, None))
class CustomHostnameAnalysisResult(ProxyOnlyResource):
    """Result of a custom domain analysis.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar is_hostname_already_verified: <code>true</code> if hostname is
     already verified; otherwise, <code>false</code>.
    :vartype is_hostname_already_verified: bool
    :ivar custom_domain_verification_test: DNS verification test result.
     Possible values include: 'Passed', 'Failed', 'Skipped'
    :vartype custom_domain_verification_test: str or
     ~azure.mgmt.web.models.DnsVerificationTestResult
    :ivar custom_domain_verification_failure_info: Raw failure information if
     DNS verification fails.
    :vartype custom_domain_verification_failure_info:
     ~azure.mgmt.web.models.ErrorEntity
    :ivar has_conflict_on_scale_unit: <code>true</code> if there is a
     conflict on a scale unit; otherwise, <code>false</code>.
    :vartype has_conflict_on_scale_unit: bool
    :ivar has_conflict_across_subscription: <code>true</code> if there is a
     conflict across subscriptions; otherwise, <code>false</code>.
    :vartype has_conflict_across_subscription: bool
    :ivar conflicting_app_resource_id: Name of the conflicting app on scale
     unit if it's within the same subscription.
    :vartype conflicting_app_resource_id: str
    :param c_name_records: CName records controller can see for this
     hostname.
    :type c_name_records: list[str]
    :param txt_records: TXT records controller can see for this hostname.
    :type txt_records: list[str]
    :param a_records: A records controller can see for this hostname.
    :type a_records: list[str]
    :param alternate_cname_records: Alternate CName records controller can
     see for this hostname.
    :type alternate_cname_records: list[str]
    :param alternate_txt_records: Alternate TXT records controller can see
     for this hostname.
    :type alternate_txt_records: list[str]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'is_hostname_already_verified': {'readonly': True},
        'custom_domain_verification_test': {'readonly': True},
        'custom_domain_verification_failure_info': {'readonly': True},
        'has_conflict_on_scale_unit': {'readonly': True},
        'has_conflict_across_subscription': {'readonly': True},
        'conflicting_app_resource_id': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'is_hostname_already_verified': {'key': 'properties.isHostnameAlreadyVerified', 'type': 'bool'},
        'custom_domain_verification_test': {'key': 'properties.customDomainVerificationTest', 'type': 'DnsVerificationTestResult'},
        'custom_domain_verification_failure_info': {'key': 'properties.customDomainVerificationFailureInfo', 'type': 'ErrorEntity'},
        'has_conflict_on_scale_unit': {'key': 'properties.hasConflictOnScaleUnit', 'type': 'bool'},
        'has_conflict_across_subscription': {'key': 'properties.hasConflictAcrossSubscription', 'type': 'bool'},
        'conflicting_app_resource_id': {'key': 'properties.conflictingAppResourceId', 'type': 'str'},
        'c_name_records': {'key': 'properties.cNameRecords', 'type': '[str]'},
        'txt_records': {'key': 'properties.txtRecords', 'type': '[str]'},
        'a_records': {'key': 'properties.aRecords', 'type': '[str]'},
        'alternate_cname_records': {'key': 'properties.alternateCNameRecords', 'type': '[str]'},
        'alternate_txt_records': {'key': 'properties.alternateTxtRecords', 'type': '[str]'},
    }

    def __init__(self, **kwargs):
        super(CustomHostnameAnalysisResult, self).__init__(**kwargs)
        # Server-populated results: always initialized to None, never taken
        # from kwargs.
        for _ro in ('is_hostname_already_verified',
                    'custom_domain_verification_test',
                    'custom_domain_verification_failure_info',
                    'has_conflict_on_scale_unit',
                    'has_conflict_across_subscription',
                    'conflicting_app_resource_id'):
            setattr(self, _ro, None)
        # Client-settable DNS record collections.
        for _name in ('c_name_records', 'txt_records', 'a_records',
                      'alternate_cname_records', 'alternate_txt_records'):
            setattr(self, _name, kwargs.get(_name, None))
class DatabaseBackupSetting(Model):
    """Database backup settings.

    All required parameters must be populated in order to send to Azure.

    :param database_type: Required. Database type (e.g. SqlAzure / MySql).
     Possible values include: 'SqlAzure', 'MySql', 'LocalMySql', 'PostgreSql'
    :type database_type: str or ~azure.mgmt.web.models.DatabaseType
    :param name: Name of the setting.
    :type name: str
    :param connection_string_name: Contains a connection string name that is
     linked to the SiteConfig.ConnectionStrings.
     This is used during restore with overwrite connection strings options.
    :type connection_string_name: str
    :param connection_string: Contains a connection string to a database
     which is being backed up or restored. If the restore should happen to a
     new database, the database name inside is the new one.
    :type connection_string: str
    """

    _validation = {
        'database_type': {'required': True},
    }

    _attribute_map = {
        'database_type': {'key': 'databaseType', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'connection_string_name': {'key': 'connectionStringName', 'type': 'str'},
        'connection_string': {'key': 'connectionString', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DatabaseBackupSetting, self).__init__(**kwargs)
        # All attributes come from optional keyword arguments; database_type
        # is validated as required by _validation at serialization time.
        for _name in ('database_type', 'name', 'connection_string_name',
                      'connection_string'):
            setattr(self, _name, kwargs.get(_name, None))
class DataSource(Model):
    """Data source used by the detectors.

    :param instructions: Instructions, if any, for the data source.
    :type instructions: list[str]
    :param data_source_uri: Datasource URI links.
    :type data_source_uri: list[~azure.mgmt.web.models.NameValuePair]
    """

    _attribute_map = {
        'instructions': {'key': 'instructions', 'type': '[str]'},
        'data_source_uri': {'key': 'dataSourceUri', 'type': '[NameValuePair]'},
    }

    def __init__(self, **kwargs):
        super(DataSource, self).__init__(**kwargs)
        # Both collections are optional and default to None.
        self.instructions = kwargs.get('instructions')
        self.data_source_uri = kwargs.get('data_source_uri')
class DataTableResponseColumn(Model):
    """Definition of a single data-table column.

    :param column_name: Name of the column.
    :type column_name: str
    :param data_type: Data type which looks like 'String' or 'Int32'.
    :type data_type: str
    :param column_type: Column Type.
    :type column_type: str
    """

    _attribute_map = {
        'column_name': {'key': 'columnName', 'type': 'str'},
        'data_type': {'key': 'dataType', 'type': 'str'},
        'column_type': {'key': 'columnType', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DataTableResponseColumn, self).__init__(**kwargs)
        # Every attribute is an optional keyword argument.
        for _name in ('column_name', 'data_type', 'column_type'):
            setattr(self, _name, kwargs.get(_name, None))
class DataTableResponseObject(Model):
    """Data table defining columns and raw row values.

    :param table_name: Name of the table.
    :type table_name: str
    :param columns: List of columns with data types.
    :type columns: list[~azure.mgmt.web.models.DataTableResponseColumn]
    :param rows: Raw row values.
    :type rows: list[list[str]]
    """

    _attribute_map = {
        'table_name': {'key': 'tableName', 'type': 'str'},
        'columns': {'key': 'columns', 'type': '[DataTableResponseColumn]'},
        'rows': {'key': 'rows', 'type': '[[str]]'},
    }

    def __init__(self, **kwargs):
        super(DataTableResponseObject, self).__init__(**kwargs)
        # Every attribute is an optional keyword argument.
        for _name in ('table_name', 'columns', 'rows'):
            setattr(self, _name, kwargs.get(_name, None))
class DefaultErrorResponse(Model):
    """App Service error response envelope.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar error: Error model.
    :vartype error: ~azure.mgmt.web.models.DefaultErrorResponseError
    """

    _validation = {
        'error': {'readonly': True},
    }

    _attribute_map = {
        'error': {'key': 'error', 'type': 'DefaultErrorResponseError'},
    }

    def __init__(self, **kwargs):
        super(DefaultErrorResponse, self).__init__(**kwargs)
        # Read-only: populated only when deserializing a server response.
        self.error = None
class DefaultErrorResponseException(HttpOperationError):
    """Raised when the server responds with an error of type
    'DefaultErrorResponse'.

    :param deserialize: A deserializer
    :param response: Server response to be deserialized.
    """

    def __init__(self, deserialize, response, *args):
        # The third positional argument fixes the deserialization target to
        # the 'DefaultErrorResponse' model.
        super(DefaultErrorResponseException, self).__init__(
            deserialize, response, 'DefaultErrorResponse', *args)
class DefaultErrorResponseError(Model):
    """Error body of an App Service error response.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar code: Standardized string to programmatically identify the error.
    :vartype code: str
    :ivar message: Detailed error description and debugging information.
    :vartype message: str
    :ivar target: Detailed error description and debugging information.
    :vartype target: str
    :param details: Detailed error items.
    :type details:
     list[~azure.mgmt.web.models.DefaultErrorResponseErrorDetailsItem]
    :ivar innererror: More information to debug error.
    :vartype innererror: str
    """

    _validation = {
        'code': {'readonly': True},
        'message': {'readonly': True},
        'target': {'readonly': True},
        'innererror': {'readonly': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[DefaultErrorResponseErrorDetailsItem]'},
        'innererror': {'key': 'innererror', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DefaultErrorResponseError, self).__init__(**kwargs)
        # Server-populated fields are initialized to None; only 'details'
        # may be supplied by the caller.
        for _ro in ('code', 'message', 'target'):
            setattr(self, _ro, None)
        self.details = kwargs.get('details', None)
        self.innererror = None
class DefaultErrorResponseErrorDetailsItem(Model):
    """A single detailed error entry.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar code: Standardized string to programmatically identify the error.
    :vartype code: str
    :ivar message: Detailed error description and debugging information.
    :vartype message: str
    :ivar target: Detailed error description and debugging information.
    :vartype target: str
    """

    _validation = {
        'code': {'readonly': True},
        'message': {'readonly': True},
        'target': {'readonly': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DefaultErrorResponseErrorDetailsItem, self).__init__(**kwargs)
        # All fields are read-only and populated only by deserialization.
        for _ro in ('code', 'message', 'target'):
            setattr(self, _ro, None)
class DeletedAppRestoreRequest(ProxyOnlyResource):
    """Details about restoring a deleted app.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param deleted_site_id: ARM resource ID of the deleted app. Example:
     /subscriptions/{subId}/providers/Microsoft.Web/deletedSites/{deletedSiteId}
    :type deleted_site_id: str
    :param recover_configuration: If true, deleted site configuration, in
     addition to content, will be restored.
    :type recover_configuration: bool
    :param snapshot_time: Point in time to restore the deleted app from,
     formatted as a DateTime string. Defaults to the time the app was deleted
     when unspecified.
    :type snapshot_time: str
    :param use_dr_secondary: If true, the snapshot is retrieved from
     DRSecondary endpoint.
    :type use_dr_secondary: bool
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'deleted_site_id': {'key': 'properties.deletedSiteId', 'type': 'str'},
        'recover_configuration': {'key': 'properties.recoverConfiguration', 'type': 'bool'},
        'snapshot_time': {'key': 'properties.snapshotTime', 'type': 'str'},
        'use_dr_secondary': {'key': 'properties.useDRSecondary', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(DeletedAppRestoreRequest, self).__init__(**kwargs)
        # All request parameters are optional keyword arguments.
        for _name in ('deleted_site_id', 'recover_configuration',
                      'snapshot_time', 'use_dr_secondary'):
            setattr(self, _name, kwargs.get(_name, None))
class DeletedSite(ProxyOnlyResource):
    """A deleted app.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar deleted_site_id: Numeric id for the deleted site.
    :vartype deleted_site_id: int
    :ivar deleted_timestamp: Time in UTC when the app was deleted.
    :vartype deleted_timestamp: str
    :ivar subscription: Subscription containing the deleted site.
    :vartype subscription: str
    :ivar resource_group: ResourceGroup that contained the deleted site.
    :vartype resource_group: str
    :ivar deleted_site_name: Name of the deleted site.
    :vartype deleted_site_name: str
    :ivar slot: Slot of the deleted site.
    :vartype slot: str
    :ivar deleted_site_kind: Kind of site that was deleted.
    :vartype deleted_site_kind: str
    :ivar geo_region_name: Geo Region of the deleted site.
    :vartype geo_region_name: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'deleted_site_id': {'readonly': True},
        'deleted_timestamp': {'readonly': True},
        'subscription': {'readonly': True},
        'resource_group': {'readonly': True},
        'deleted_site_name': {'readonly': True},
        'slot': {'readonly': True},
        'deleted_site_kind': {'readonly': True},
        'geo_region_name': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'deleted_site_id': {'key': 'properties.deletedSiteId', 'type': 'int'},
        'deleted_timestamp': {'key': 'properties.deletedTimestamp', 'type': 'str'},
        'subscription': {'key': 'properties.subscription', 'type': 'str'},
        'resource_group': {'key': 'properties.resourceGroup', 'type': 'str'},
        'deleted_site_name': {'key': 'properties.deletedSiteName', 'type': 'str'},
        'slot': {'key': 'properties.slot', 'type': 'str'},
        'deleted_site_kind': {'key': 'properties.kind', 'type': 'str'},
        'geo_region_name': {'key': 'properties.geoRegionName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DeletedSite, self).__init__(**kwargs)
        # Every site-specific field is server-populated (read-only), so they
        # are all initialized to None regardless of kwargs.
        for _ro in ('deleted_site_id', 'deleted_timestamp', 'subscription',
                    'resource_group', 'deleted_site_name', 'slot',
                    'deleted_site_kind', 'geo_region_name'):
            setattr(self, _ro, None)
class Deployment(ProxyOnlyResource):
    """Deployment record for publishing activity on an app.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param status: Deployment status.
    :type status: int
    :param message: Details about deployment status.
    :type message: str
    :param author: Who authored the deployment.
    :type author: str
    :param deployer: Who performed the deployment.
    :type deployer: str
    :param author_email: Author email.
    :type author_email: str
    :param start_time: Start time.
    :type start_time: datetime
    :param end_time: End time.
    :type end_time: datetime
    :param active: True if deployment is currently active, false if
     completed and null if not started.
    :type active: bool
    :param details: Details on deployment.
    :type details: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'int'},
        'message': {'key': 'properties.message', 'type': 'str'},
        'author': {'key': 'properties.author', 'type': 'str'},
        'deployer': {'key': 'properties.deployer', 'type': 'str'},
        'author_email': {'key': 'properties.author_email', 'type': 'str'},
        'start_time': {'key': 'properties.start_time', 'type': 'iso-8601'},
        'end_time': {'key': 'properties.end_time', 'type': 'iso-8601'},
        'active': {'key': 'properties.active', 'type': 'bool'},
        'details': {'key': 'properties.details', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(Deployment, self).__init__(**kwargs)
        # All deployment attributes are optional keyword arguments.
        for _name in ('status', 'message', 'author', 'deployer',
                      'author_email', 'start_time', 'end_time', 'active',
                      'details'):
            setattr(self, _name, kwargs.get(_name, None))
class DeploymentLocations(Model):
    """Locations (regions or App Service Environments) where App Service
    resources can be deployed.

    :param locations: Available regions.
    :type locations: list[~azure.mgmt.web.models.GeoRegion]
    :param hosting_environments: Available App Service Environments with
     full descriptions of the environments.
    :type hosting_environments:
     list[~azure.mgmt.web.models.AppServiceEnvironment]
    :param hosting_environment_deployment_infos: Available App Service
     Environments with basic information.
    :type hosting_environment_deployment_infos:
     list[~azure.mgmt.web.models.HostingEnvironmentDeploymentInfo]
    """

    _attribute_map = {
        'locations': {'key': 'locations', 'type': '[GeoRegion]'},
        'hosting_environments': {'key': 'hostingEnvironments', 'type': '[AppServiceEnvironment]'},
        'hosting_environment_deployment_infos': {'key': 'hostingEnvironmentDeploymentInfos', 'type': '[HostingEnvironmentDeploymentInfo]'},
    }

    def __init__(self, **kwargs):
        super(DeploymentLocations, self).__init__(**kwargs)
        # Every mapped attribute is caller-settable; default to None.
        for prop in self._attribute_map:
            setattr(self, prop, kwargs.get(prop))
class DetectorAbnormalTimePeriod(Model):
    """An abnormal time period detected by a single detector.

    :param start_time: Start time of the correlated event
    :type start_time: datetime
    :param end_time: End time of the correlated event
    :type end_time: datetime
    :param message: Message describing the event
    :type message: str
    :param source: Represents the name of the Detector
    :type source: str
    :param priority: Represents the rank of the Detector
    :type priority: float
    :param meta_data: Downtime metadata
    :type meta_data: list[list[~azure.mgmt.web.models.NameValuePair]]
    :param type: Represents the type of the Detector. Possible values
     include: 'ServiceIncident', 'AppDeployment', 'AppCrash',
     'RuntimeIssueDetected', 'AseDeployment', 'UserIssue', 'PlatformIssue',
     'Other'
    :type type: str or ~azure.mgmt.web.models.IssueType
    :param solutions: List of proposed solutions
    :type solutions: list[~azure.mgmt.web.models.Solution]
    """

    _attribute_map = {
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'message': {'key': 'message', 'type': 'str'},
        'source': {'key': 'source', 'type': 'str'},
        'priority': {'key': 'priority', 'type': 'float'},
        'meta_data': {'key': 'metaData', 'type': '[[NameValuePair]]'},
        'type': {'key': 'type', 'type': 'IssueType'},
        'solutions': {'key': 'solutions', 'type': '[Solution]'},
    }

    def __init__(self, **kwargs):
        super(DetectorAbnormalTimePeriod, self).__init__(**kwargs)
        # All mapped attributes are caller-settable; default to None.
        for prop in self._attribute_map:
            setattr(self, prop, kwargs.get(prop))
class DetectorDefinition(ProxyOnlyResource):
    """Definition of a diagnostic detector.

    All variables are populated by the server and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar display_name: Display name of the detector
    :vartype display_name: str
    :ivar description: Description of the detector
    :vartype description: str
    :ivar rank: Detector Rank
    :vartype rank: float
    :ivar is_enabled: Flag representing whether detector is enabled or not.
    :vartype is_enabled: bool
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'display_name': {'readonly': True},
        'description': {'readonly': True},
        'rank': {'readonly': True},
        'is_enabled': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'display_name': {'key': 'properties.displayName', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'rank': {'key': 'properties.rank', 'type': 'float'},
        'is_enabled': {'key': 'properties.isEnabled', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(DetectorDefinition, self).__init__(**kwargs)
        # Every detector property is server-populated; start them unset.
        self.display_name = self.description = None
        self.rank = self.is_enabled = None
class DetectorInfo(Model):
    """Metadata describing a detector.

    All variables are populated by the server and will be ignored when
    sending a request.

    :ivar description: Short description of the detector and its purpose
    :vartype description: str
    :ivar category: Support Category
    :vartype category: str
    :ivar sub_category: Support Sub Category
    :vartype sub_category: str
    :ivar support_topic_id: Support Topic Id
    :vartype support_topic_id: str
    """

    _validation = {
        'description': {'readonly': True},
        'category': {'readonly': True},
        'sub_category': {'readonly': True},
        'support_topic_id': {'readonly': True},
    }

    _attribute_map = {
        'description': {'key': 'description', 'type': 'str'},
        'category': {'key': 'category', 'type': 'str'},
        'sub_category': {'key': 'subCategory', 'type': 'str'},
        'support_topic_id': {'key': 'supportTopicId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DetectorInfo, self).__init__(**kwargs)
        # All fields are read-only server data; initialize to unset.
        self.description = self.category = None
        self.sub_category = self.support_topic_id = None
class DetectorResponse(ProxyOnlyResource):
    """Response returned by a detector run.

    ``id``, ``name`` and ``type`` are populated by the server and ignored
    when sending a request.

    :param kind: Kind of resource.
    :type kind: str
    :param metadata: metadata for the detector
    :type metadata: ~azure.mgmt.web.models.DetectorInfo
    :param dataset: Data Set
    :type dataset: list[~azure.mgmt.web.models.DiagnosticData]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'metadata': {'key': 'properties.metadata', 'type': 'DetectorInfo'},
        'dataset': {'key': 'properties.dataset', 'type': '[DiagnosticData]'},
    }

    def __init__(self, **kwargs):
        super(DetectorResponse, self).__init__(**kwargs)
        # Detector payload; absent keys default to None.
        self.metadata = kwargs.get('metadata')
        self.dataset = kwargs.get('dataset')
class DiagnosticAnalysis(ProxyOnlyResource):
    """Result of a diagnostic analysis performed on an application.

    ``id``, ``name`` and ``type`` are populated by the server and ignored
    when sending a request.

    :param kind: Kind of resource.
    :type kind: str
    :param start_time: Start time of the period
    :type start_time: datetime
    :param end_time: End time of the period
    :type end_time: datetime
    :param abnormal_time_periods: List of time periods.
    :type abnormal_time_periods:
     list[~azure.mgmt.web.models.AbnormalTimePeriod]
    :param payload: Data by each detector
    :type payload: list[~azure.mgmt.web.models.AnalysisData]
    :param non_correlated_detectors: Data by each detector for detectors
     that did not correlate
    :type non_correlated_detectors:
     list[~azure.mgmt.web.models.DetectorDefinition]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'properties.endTime', 'type': 'iso-8601'},
        'abnormal_time_periods': {'key': 'properties.abnormalTimePeriods', 'type': '[AbnormalTimePeriod]'},
        'payload': {'key': 'properties.payload', 'type': '[AnalysisData]'},
        'non_correlated_detectors': {'key': 'properties.nonCorrelatedDetectors', 'type': '[DetectorDefinition]'},
    }

    def __init__(self, **kwargs):
        super(DiagnosticAnalysis, self).__init__(**kwargs)
        # Analysis properties; anything not supplied stays None.
        for prop in ('start_time', 'end_time', 'abnormal_time_periods',
                     'payload', 'non_correlated_detectors'):
            setattr(self, prop, kwargs.get(prop))
class DiagnosticCategory(ProxyOnlyResource):
    """A category of diagnostics available for an app.

    All variables are populated by the server and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar description: Description of the diagnostic category
    :vartype description: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'description': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DiagnosticCategory, self).__init__(**kwargs)
        # Read-only, server-populated.
        self.description = None
class DiagnosticData(Model):
    """A data set together with instructions on how to render it.

    :param table: Data in table form
    :type table: ~azure.mgmt.web.models.DataTableResponseObject
    :param rendering_properties: Properties that describe how the table
     should be rendered
    :type rendering_properties: ~azure.mgmt.web.models.Rendering
    """

    _attribute_map = {
        'table': {'key': 'table', 'type': 'DataTableResponseObject'},
        'rendering_properties': {'key': 'renderingProperties', 'type': 'Rendering'},
    }

    def __init__(self, **kwargs):
        super(DiagnosticData, self).__init__(**kwargs)
        # Both attributes are caller-settable; default to None.
        for prop in self._attribute_map:
            setattr(self, prop, kwargs.get(prop))
class DiagnosticDetectorResponse(ProxyOnlyResource):
    """Response produced by running a diagnostic detector.

    ``id``, ``name`` and ``type`` are populated by the server and ignored
    when sending a request.

    :param kind: Kind of resource.
    :type kind: str
    :param start_time: Start time of the period
    :type start_time: datetime
    :param end_time: End time of the period
    :type end_time: datetime
    :param issue_detected: Flag representing Issue was detected.
    :type issue_detected: bool
    :param detector_definition: Detector's definition
    :type detector_definition: ~azure.mgmt.web.models.DetectorDefinition
    :param metrics: Metrics provided by the detector
    :type metrics: list[~azure.mgmt.web.models.DiagnosticMetricSet]
    :param abnormal_time_periods: List of Correlated events found by the
     detector
    :type abnormal_time_periods:
     list[~azure.mgmt.web.models.DetectorAbnormalTimePeriod]
    :param data: Additional Data that detector wants to send.
    :type data: list[list[~azure.mgmt.web.models.NameValuePair]]
    :param response_meta_data: Meta Data
    :type response_meta_data: ~azure.mgmt.web.models.ResponseMetaData
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'properties.endTime', 'type': 'iso-8601'},
        'issue_detected': {'key': 'properties.issueDetected', 'type': 'bool'},
        'detector_definition': {'key': 'properties.detectorDefinition', 'type': 'DetectorDefinition'},
        'metrics': {'key': 'properties.metrics', 'type': '[DiagnosticMetricSet]'},
        'abnormal_time_periods': {'key': 'properties.abnormalTimePeriods', 'type': '[DetectorAbnormalTimePeriod]'},
        'data': {'key': 'properties.data', 'type': '[[NameValuePair]]'},
        'response_meta_data': {'key': 'properties.responseMetaData', 'type': 'ResponseMetaData'},
    }

    def __init__(self, **kwargs):
        super(DiagnosticDetectorResponse, self).__init__(**kwargs)
        # Detector output properties; anything not supplied stays None.
        for prop in ('start_time', 'end_time', 'issue_detected',
                     'detector_definition', 'metrics',
                     'abnormal_time_periods', 'data', 'response_meta_data'):
            setattr(self, prop, kwargs.get(prop))
class DiagnosticMetricSample(Model):
    """A single sampled value of a diagnostic metric.

    :param timestamp: Time at which metric is measured
    :type timestamp: datetime
    :param role_instance: Role Instance. Null if this counter is not per
     instance.
     This is returned and should be whichever instance name we desire to be
     returned, i.e. CPU and Memory return RDWORKERNAME (LargeDed..._IN_0)
     where RDWORKERNAME is Machine name below and RoleInstance name in
     parenthesis
    :type role_instance: str
    :param total: Total value of the metric. If multiple measurements are
     made this will have sum of all.
    :type total: float
    :param maximum: Maximum of the metric sampled during the time period
    :type maximum: float
    :param minimum: Minimum of the metric sampled during the time period
    :type minimum: float
    :param is_aggregated: Whether the values are aggregates across all
     workers or not
    :type is_aggregated: bool
    """

    _attribute_map = {
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'role_instance': {'key': 'roleInstance', 'type': 'str'},
        'total': {'key': 'total', 'type': 'float'},
        'maximum': {'key': 'maximum', 'type': 'float'},
        'minimum': {'key': 'minimum', 'type': 'float'},
        'is_aggregated': {'key': 'isAggregated', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(DiagnosticMetricSample, self).__init__(**kwargs)
        # All mapped attributes are caller-settable; default to None.
        for prop in self._attribute_map:
            setattr(self, prop, kwargs.get(prop))
class DiagnosticMetricSet(Model):
    """A set of diagnostic metric samples over a time period.

    :param name: Name of the metric
    :type name: str
    :param unit: Metric's unit
    :type unit: str
    :param start_time: Start time of the period
    :type start_time: datetime
    :param end_time: End time of the period
    :type end_time: datetime
    :param time_grain: Presented time grain. Supported grains at the moment
     are PT1M, PT1H, P1D
    :type time_grain: str
    :param values: Collection of metric values for the selected period
     based on
     {Microsoft.Web.Hosting.Administration.DiagnosticMetricSet.TimeGrain}
    :type values: list[~azure.mgmt.web.models.DiagnosticMetricSample]
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'unit': {'key': 'unit', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'time_grain': {'key': 'timeGrain', 'type': 'str'},
        'values': {'key': 'values', 'type': '[DiagnosticMetricSample]'},
    }

    def __init__(self, **kwargs):
        super(DiagnosticMetricSet, self).__init__(**kwargs)
        # All mapped attributes are caller-settable; default to None.
        for prop in self._attribute_map:
            setattr(self, prop, kwargs.get(prop))
class Dimension(Model):
    """Dimension of a resource metric, e.g. instance-specific HTTP requests
    for a web app, where instance name is a dimension of the metric HTTP
    request.

    :param name:
    :type name: str
    :param display_name:
    :type display_name: str
    :param internal_name:
    :type internal_name: str
    :param to_be_exported_for_shoebox:
    :type to_be_exported_for_shoebox: bool
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'internal_name': {'key': 'internalName', 'type': 'str'},
        'to_be_exported_for_shoebox': {'key': 'toBeExportedForShoebox', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(Dimension, self).__init__(**kwargs)
        # All mapped attributes are caller-settable; default to None.
        for prop in self._attribute_map:
            setattr(self, prop, kwargs.get(prop))
class Domain(Resource):
    """Information about a registered domain.

    Variables marked ``:ivar:`` are populated by the server and ignored
    when sending a request. All required parameters must be populated in
    order to send to Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :param location: Required. Resource Location.
    :type location: str
    :ivar type: Resource type.
    :vartype type: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param contact_admin: Required. Administrative contact.
    :type contact_admin: ~azure.mgmt.web.models.Contact
    :param contact_billing: Required. Billing contact.
    :type contact_billing: ~azure.mgmt.web.models.Contact
    :param contact_registrant: Required. Registrant contact.
    :type contact_registrant: ~azure.mgmt.web.models.Contact
    :param contact_tech: Required. Technical contact.
    :type contact_tech: ~azure.mgmt.web.models.Contact
    :ivar registration_status: Domain registration status. Possible values
     include: 'Active', 'Awaiting', 'Cancelled', 'Confiscated', 'Disabled',
     'Excluded', 'Expired', 'Failed', 'Held', 'Locked', 'Parked',
     'Pending', 'Reserved', 'Reverted', 'Suspended', 'Transferred',
     'Unknown', 'Unlocked', 'Unparked', 'Updated', 'JsonConverterFailed'
    :vartype registration_status: str or ~azure.mgmt.web.models.DomainStatus
    :ivar provisioning_state: Domain provisioning state. Possible values
     include: 'Succeeded', 'Failed', 'Canceled', 'InProgress', 'Deleting'
    :vartype provisioning_state: str or
     ~azure.mgmt.web.models.ProvisioningState
    :ivar name_servers: Name servers.
    :vartype name_servers: list[str]
    :param privacy: <code>true</code> if domain privacy is enabled for this
     domain; otherwise, <code>false</code>.
    :type privacy: bool
    :ivar created_time: Domain creation timestamp.
    :vartype created_time: datetime
    :ivar expiration_time: Domain expiration timestamp.
    :vartype expiration_time: datetime
    :ivar last_renewed_time: Timestamp when the domain was renewed last
     time.
    :vartype last_renewed_time: datetime
    :param auto_renew: <code>true</code> if the domain should be
     automatically renewed; otherwise, <code>false</code>. Default value:
     True .
    :type auto_renew: bool
    :ivar ready_for_dns_record_management: <code>true</code> if Azure can
     assign this domain to App Service apps; otherwise, <code>false</code>.
     This value will be <code>true</code> if domain registration status is
     active and it is hosted on name servers Azure has programmatic access
     to.
    :vartype ready_for_dns_record_management: bool
    :ivar managed_host_names: All hostnames derived from the domain and
     assigned to Azure resources.
    :vartype managed_host_names: list[~azure.mgmt.web.models.HostName]
    :param consent: Required. Legal agreement consent.
    :type consent: ~azure.mgmt.web.models.DomainPurchaseConsent
    :ivar domain_not_renewable_reasons: Reasons why domain is not
     renewable.
    :vartype domain_not_renewable_reasons: list[str]
    :param dns_type: Current DNS type. Possible values include: 'AzureDns',
     'DefaultDomainRegistrarDns'
    :type dns_type: str or ~azure.mgmt.web.models.DnsType
    :param dns_zone_id: Azure DNS Zone to use
    :type dns_zone_id: str
    :param target_dns_type: Target DNS type (would be used for migration).
     Possible values include: 'AzureDns', 'DefaultDomainRegistrarDns'
    :type target_dns_type: str or ~azure.mgmt.web.models.DnsType
    :param auth_code:
    :type auth_code: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'location': {'required': True},
        'type': {'readonly': True},
        'contact_admin': {'required': True},
        'contact_billing': {'required': True},
        'contact_registrant': {'required': True},
        'contact_tech': {'required': True},
        'registration_status': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'name_servers': {'readonly': True},
        'created_time': {'readonly': True},
        'expiration_time': {'readonly': True},
        'last_renewed_time': {'readonly': True},
        'ready_for_dns_record_management': {'readonly': True},
        'managed_host_names': {'readonly': True},
        'consent': {'required': True},
        'domain_not_renewable_reasons': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'contact_admin': {'key': 'properties.contactAdmin', 'type': 'Contact'},
        'contact_billing': {'key': 'properties.contactBilling', 'type': 'Contact'},
        'contact_registrant': {'key': 'properties.contactRegistrant', 'type': 'Contact'},
        'contact_tech': {'key': 'properties.contactTech', 'type': 'Contact'},
        'registration_status': {'key': 'properties.registrationStatus', 'type': 'DomainStatus'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'},
        'name_servers': {'key': 'properties.nameServers', 'type': '[str]'},
        'privacy': {'key': 'properties.privacy', 'type': 'bool'},
        'created_time': {'key': 'properties.createdTime', 'type': 'iso-8601'},
        'expiration_time': {'key': 'properties.expirationTime', 'type': 'iso-8601'},
        'last_renewed_time': {'key': 'properties.lastRenewedTime', 'type': 'iso-8601'},
        'auto_renew': {'key': 'properties.autoRenew', 'type': 'bool'},
        'ready_for_dns_record_management': {'key': 'properties.readyForDnsRecordManagement', 'type': 'bool'},
        'managed_host_names': {'key': 'properties.managedHostNames', 'type': '[HostName]'},
        'consent': {'key': 'properties.consent', 'type': 'DomainPurchaseConsent'},
        'domain_not_renewable_reasons': {'key': 'properties.domainNotRenewableReasons', 'type': '[str]'},
        'dns_type': {'key': 'properties.dnsType', 'type': 'DnsType'},
        'dns_zone_id': {'key': 'properties.dnsZoneId', 'type': 'str'},
        'target_dns_type': {'key': 'properties.targetDnsType', 'type': 'DnsType'},
        'auth_code': {'key': 'properties.authCode', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(Domain, self).__init__(**kwargs)
        # Server-populated (read-only) properties start out unset.
        for readonly in ('registration_status', 'provisioning_state',
                         'name_servers', 'created_time', 'expiration_time',
                         'last_renewed_time',
                         'ready_for_dns_record_management',
                         'managed_host_names',
                         'domain_not_renewable_reasons'):
            setattr(self, readonly, None)
        # Caller-supplied properties; anything not provided stays None.
        for prop in ('contact_admin', 'contact_billing',
                     'contact_registrant', 'contact_tech', 'privacy',
                     'consent', 'dns_type', 'dns_zone_id',
                     'target_dns_type', 'auth_code'):
            setattr(self, prop, kwargs.get(prop))
        # Domains auto-renew unless explicitly disabled.
        self.auto_renew = kwargs.get('auto_renew', True)
class DomainAvailablilityCheckResult(Model):
    """Result of a domain availability check.

    NOTE: the class name's misspelling ("Availablility") is part of the
    generated public API and must not be corrected here.

    :param name: Name of the domain.
    :type name: str
    :param available: <code>true</code> if domain can be purchased using
     CreateDomain API; otherwise, <code>false</code>.
    :type available: bool
    :param domain_type: Valid values are Regular domain: Azure will charge
     the full price of domain registration, SoftDeleted: Purchasing this
     domain will simply restore it and this operation will not cost
     anything. Possible values include: 'Regular', 'SoftDeleted'
    :type domain_type: str or ~azure.mgmt.web.models.DomainType
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'available': {'key': 'available', 'type': 'bool'},
        'domain_type': {'key': 'domainType', 'type': 'DomainType'},
    }

    def __init__(self, **kwargs):
        super(DomainAvailablilityCheckResult, self).__init__(**kwargs)
        # All mapped attributes are caller-settable; default to None.
        for prop in self._attribute_map:
            setattr(self, prop, kwargs.get(prop))
class DomainControlCenterSsoRequest(Model):
    """Single sign-on request information for domain management.

    All variables are populated by the server and will be ignored when
    sending a request.

    :ivar url: URL where the single sign-on request is to be made.
    :vartype url: str
    :ivar post_parameter_key: Post parameter key.
    :vartype post_parameter_key: str
    :ivar post_parameter_value: Post parameter value. Client should use
     'application/x-www-form-urlencoded' encoding for this value.
    :vartype post_parameter_value: str
    """

    _validation = {
        'url': {'readonly': True},
        'post_parameter_key': {'readonly': True},
        'post_parameter_value': {'readonly': True},
    }

    _attribute_map = {
        'url': {'key': 'url', 'type': 'str'},
        'post_parameter_key': {'key': 'postParameterKey', 'type': 'str'},
        'post_parameter_value': {'key': 'postParameterValue', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DomainControlCenterSsoRequest, self).__init__(**kwargs)
        # All fields are read-only server data; initialize to unset.
        self.url = self.post_parameter_key = self.post_parameter_value = None
class DomainOwnershipIdentifier(ProxyOnlyResource):
    """Domain ownership identifier.

    ``id``, ``name`` and ``type`` are populated by the server and ignored
    when sending a request.

    :param kind: Kind of resource.
    :type kind: str
    :param ownership_id: Ownership Id.
    :type ownership_id: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'ownership_id': {'key': 'properties.ownershipId', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DomainOwnershipIdentifier, self).__init__(**kwargs)
        # The only caller-settable property; absent key defaults to None.
        self.ownership_id = kwargs.get('ownership_id')
class DomainPatchResource(ProxyOnlyResource):
    """ARM resource used to PATCH a domain. Mirrors :class:`Domain` but
    without location/tags.

    Variables marked ``:ivar:`` are populated by the server and ignored
    when sending a request. All required parameters must be populated in
    order to send to Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param contact_admin: Required. Administrative contact.
    :type contact_admin: ~azure.mgmt.web.models.Contact
    :param contact_billing: Required. Billing contact.
    :type contact_billing: ~azure.mgmt.web.models.Contact
    :param contact_registrant: Required. Registrant contact.
    :type contact_registrant: ~azure.mgmt.web.models.Contact
    :param contact_tech: Required. Technical contact.
    :type contact_tech: ~azure.mgmt.web.models.Contact
    :ivar registration_status: Domain registration status. Possible values
     include: 'Active', 'Awaiting', 'Cancelled', 'Confiscated', 'Disabled',
     'Excluded', 'Expired', 'Failed', 'Held', 'Locked', 'Parked',
     'Pending', 'Reserved', 'Reverted', 'Suspended', 'Transferred',
     'Unknown', 'Unlocked', 'Unparked', 'Updated', 'JsonConverterFailed'
    :vartype registration_status: str or ~azure.mgmt.web.models.DomainStatus
    :ivar provisioning_state: Domain provisioning state. Possible values
     include: 'Succeeded', 'Failed', 'Canceled', 'InProgress', 'Deleting'
    :vartype provisioning_state: str or
     ~azure.mgmt.web.models.ProvisioningState
    :ivar name_servers: Name servers.
    :vartype name_servers: list[str]
    :param privacy: <code>true</code> if domain privacy is enabled for this
     domain; otherwise, <code>false</code>.
    :type privacy: bool
    :ivar created_time: Domain creation timestamp.
    :vartype created_time: datetime
    :ivar expiration_time: Domain expiration timestamp.
    :vartype expiration_time: datetime
    :ivar last_renewed_time: Timestamp when the domain was renewed last
     time.
    :vartype last_renewed_time: datetime
    :param auto_renew: <code>true</code> if the domain should be
     automatically renewed; otherwise, <code>false</code>. Default value:
     True .
    :type auto_renew: bool
    :ivar ready_for_dns_record_management: <code>true</code> if Azure can
     assign this domain to App Service apps; otherwise, <code>false</code>.
     This value will be <code>true</code> if domain registration status is
     active and it is hosted on name servers Azure has programmatic access
     to.
    :vartype ready_for_dns_record_management: bool
    :ivar managed_host_names: All hostnames derived from the domain and
     assigned to Azure resources.
    :vartype managed_host_names: list[~azure.mgmt.web.models.HostName]
    :param consent: Required. Legal agreement consent.
    :type consent: ~azure.mgmt.web.models.DomainPurchaseConsent
    :ivar domain_not_renewable_reasons: Reasons why domain is not
     renewable.
    :vartype domain_not_renewable_reasons: list[str]
    :param dns_type: Current DNS type. Possible values include: 'AzureDns',
     'DefaultDomainRegistrarDns'
    :type dns_type: str or ~azure.mgmt.web.models.DnsType
    :param dns_zone_id: Azure DNS Zone to use
    :type dns_zone_id: str
    :param target_dns_type: Target DNS type (would be used for migration).
     Possible values include: 'AzureDns', 'DefaultDomainRegistrarDns'
    :type target_dns_type: str or ~azure.mgmt.web.models.DnsType
    :param auth_code:
    :type auth_code: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'contact_admin': {'required': True},
        'contact_billing': {'required': True},
        'contact_registrant': {'required': True},
        'contact_tech': {'required': True},
        'registration_status': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'name_servers': {'readonly': True},
        'created_time': {'readonly': True},
        'expiration_time': {'readonly': True},
        'last_renewed_time': {'readonly': True},
        'ready_for_dns_record_management': {'readonly': True},
        'managed_host_names': {'readonly': True},
        'consent': {'required': True},
        'domain_not_renewable_reasons': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'contact_admin': {'key': 'properties.contactAdmin', 'type': 'Contact'},
        'contact_billing': {'key': 'properties.contactBilling', 'type': 'Contact'},
        'contact_registrant': {'key': 'properties.contactRegistrant', 'type': 'Contact'},
        'contact_tech': {'key': 'properties.contactTech', 'type': 'Contact'},
        'registration_status': {'key': 'properties.registrationStatus', 'type': 'DomainStatus'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'ProvisioningState'},
        'name_servers': {'key': 'properties.nameServers', 'type': '[str]'},
        'privacy': {'key': 'properties.privacy', 'type': 'bool'},
        'created_time': {'key': 'properties.createdTime', 'type': 'iso-8601'},
        'expiration_time': {'key': 'properties.expirationTime', 'type': 'iso-8601'},
        'last_renewed_time': {'key': 'properties.lastRenewedTime', 'type': 'iso-8601'},
        'auto_renew': {'key': 'properties.autoRenew', 'type': 'bool'},
        'ready_for_dns_record_management': {'key': 'properties.readyForDnsRecordManagement', 'type': 'bool'},
        'managed_host_names': {'key': 'properties.managedHostNames', 'type': '[HostName]'},
        'consent': {'key': 'properties.consent', 'type': 'DomainPurchaseConsent'},
        'domain_not_renewable_reasons': {'key': 'properties.domainNotRenewableReasons', 'type': '[str]'},
        'dns_type': {'key': 'properties.dnsType', 'type': 'DnsType'},
        'dns_zone_id': {'key': 'properties.dnsZoneId', 'type': 'str'},
        'target_dns_type': {'key': 'properties.targetDnsType', 'type': 'DnsType'},
        'auth_code': {'key': 'properties.authCode', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(DomainPatchResource, self).__init__(**kwargs)
        # Server-populated (read-only) properties start out unset.
        for readonly in ('registration_status', 'provisioning_state',
                         'name_servers', 'created_time', 'expiration_time',
                         'last_renewed_time',
                         'ready_for_dns_record_management',
                         'managed_host_names',
                         'domain_not_renewable_reasons'):
            setattr(self, readonly, None)
        # Caller-supplied properties; anything not provided stays None.
        for prop in ('contact_admin', 'contact_billing',
                     'contact_registrant', 'contact_tech', 'privacy',
                     'consent', 'dns_type', 'dns_zone_id',
                     'target_dns_type', 'auth_code'):
            setattr(self, prop, kwargs.get(prop))
        # Domains auto-renew unless explicitly disabled.
        self.auto_renew = kwargs.get('auto_renew', True)
class DomainPurchaseConsent(Model):
    """Records acceptance of the legal agreements that apply to a domain
    purchase.

    :param agreement_keys: Keys of the applicable legal agreements. The list
     can be retrieved with the ListLegalAgreements API under the
     <code>TopLevelDomain</code> resource.
    :type agreement_keys: list[str]
    :param agreed_by: IP address of the accepting client.
    :type agreed_by: str
    :param agreed_at: Timestamp of when the agreements were accepted.
    :type agreed_at: datetime
    """

    _attribute_map = {
        'agreement_keys': {'key': 'agreementKeys', 'type': '[str]'},
        'agreed_by': {'key': 'agreedBy', 'type': 'str'},
        'agreed_at': {'key': 'agreedAt', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        super(DomainPurchaseConsent, self).__init__(**kwargs)
        # All properties are optional; absent ones default to None.
        for name in ('agreement_keys', 'agreed_by', 'agreed_at'):
            setattr(self, name, kwargs.get(name))
class DomainRecommendationSearchParameters(Model):
    """Search parameters for generating domain recommendations.

    :param keywords: Keywords that drive the recommendations.
    :type keywords: str
    :param max_domain_recommendations: Upper bound on the number of
     recommendations returned.
    :type max_domain_recommendations: int
    """

    _attribute_map = {
        'keywords': {'key': 'keywords', 'type': 'str'},
        'max_domain_recommendations': {'key': 'maxDomainRecommendations', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        super(DomainRecommendationSearchParameters, self).__init__(**kwargs)
        for name in ('keywords', 'max_domain_recommendations'):
            setattr(self, name, kwargs.get(name))
class EnabledConfig(Model):
    """A simple on/off configuration flag.

    :param enabled: True if the configuration is enabled, false if disabled,
     and None if the configuration is not set.
    :type enabled: bool
    """

    _attribute_map = {
        'enabled': {'key': 'enabled', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(EnabledConfig, self).__init__(**kwargs)
        self.enabled = kwargs.get('enabled')
class EndpointDependency(Model):
    """A dependent service's domain name together with details of the current
    connection status to it.

    :param domain_name: Domain name of the dependency.
    :type domain_name: str
    :param endpoint_details: IP addresses and ports used when connecting to
     the domain name.
    :type endpoint_details: list[~azure.mgmt.web.models.EndpointDetail]
    """

    _attribute_map = {
        'domain_name': {'key': 'domainName', 'type': 'str'},
        'endpoint_details': {'key': 'endpointDetails', 'type': '[EndpointDetail]'},
    }

    def __init__(self, **kwargs):
        super(EndpointDependency, self).__init__(**kwargs)
        for name in ('domain_name', 'endpoint_details'):
            setattr(self, name, kwargs.get(name))
class EndpointDetail(Model):
    """TCP connectivity information from the App Service Environment to a
    single endpoint.

    :param ip_address: IP address the domain name currently resolves to.
    :type ip_address: str
    :param port: Port the endpoint is connected to.
    :type port: int
    :param latency: Time in milliseconds to establish a TCP connection from
     the App Service Environment to this address/port.
    :type latency: float
    :param is_accessable: Whether a TCP connection can be created from the
     App Service Environment to this address/port. (Spelling follows the
     service contract.)
    :type is_accessable: bool
    """

    _attribute_map = {
        'ip_address': {'key': 'ipAddress', 'type': 'str'},
        'port': {'key': 'port', 'type': 'int'},
        'latency': {'key': 'latency', 'type': 'float'},
        'is_accessable': {'key': 'isAccessable', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(EndpointDetail, self).__init__(**kwargs)
        for name in ('ip_address', 'port', 'latency', 'is_accessable'):
            setattr(self, name, kwargs.get(name))
class ErrorEntity(Model):
    """Error body returned by the API.

    :param extended_code: Type of error.
    :type extended_code: str
    :param message_template: Message template.
    :type message_template: str
    :param parameters: Parameters substituted into the template.
    :type parameters: list[str]
    :param inner_errors: Nested errors, if any.
    :type inner_errors: list[~azure.mgmt.web.models.ErrorEntity]
    :param code: Basic error code.
    :type code: str
    :param message: Details of the error.
    :type message: str
    """

    _attribute_map = {
        'extended_code': {'key': 'extendedCode', 'type': 'str'},
        'message_template': {'key': 'messageTemplate', 'type': 'str'},
        'parameters': {'key': 'parameters', 'type': '[str]'},
        'inner_errors': {'key': 'innerErrors', 'type': '[ErrorEntity]'},
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ErrorEntity, self).__init__(**kwargs)
        for name in ('extended_code', 'message_template', 'parameters',
                     'inner_errors', 'code', 'message'):
            setattr(self, name, kwargs.get(name))
class Experiments(Model):
    """Routing rules used for production experiments.

    :param ramp_up_rules: The ramp-up rules.
    :type ramp_up_rules: list[~azure.mgmt.web.models.RampUpRule]
    """

    _attribute_map = {
        'ramp_up_rules': {'key': 'rampUpRules', 'type': '[RampUpRule]'},
    }

    def __init__(self, **kwargs):
        super(Experiments, self).__init__(**kwargs)
        self.ramp_up_rules = kwargs.get('ramp_up_rules')
class FileSystemApplicationLogsConfig(Model):
    """Configuration for writing application logs to the file system.

    :param level: Log level. Possible values include: 'Off', 'Verbose',
     'Information', 'Warning', 'Error'. Default value: "Off" .
    :type level: str or ~azure.mgmt.web.models.LogLevel
    """

    _attribute_map = {
        'level': {'key': 'level', 'type': 'LogLevel'},
    }

    def __init__(self, **kwargs):
        super(FileSystemApplicationLogsConfig, self).__init__(**kwargs)
        # Unlike most properties, the service default here is "Off",
        # not None.
        self.level = kwargs.get('level', "Off")
class FileSystemHttpLogsConfig(Model):
    """Configuration for writing HTTP logs to the file system.

    :param retention_in_mb: Maximum size in megabytes HTTP log files may
     use. When reached, old log files are removed to make space for new
     ones. Value can range between 25 and 100.
    :type retention_in_mb: int
    :param retention_in_days: Retention in days; files older than this are
     removed. Zero or lower means no retention.
    :type retention_in_days: int
    :param enabled: True if the configuration is enabled, false if disabled,
     and None if not set.
    :type enabled: bool
    """

    _validation = {
        'retention_in_mb': {'maximum': 100, 'minimum': 25},
    }

    _attribute_map = {
        'retention_in_mb': {'key': 'retentionInMb', 'type': 'int'},
        'retention_in_days': {'key': 'retentionInDays', 'type': 'int'},
        'enabled': {'key': 'enabled', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(FileSystemHttpLogsConfig, self).__init__(**kwargs)
        for name in ('retention_in_mb', 'retention_in_days', 'enabled'):
            setattr(self, name, kwargs.get(name))
class FunctionEnvelope(ProxyOnlyResource):
    """Function (web job) information.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param function_app_id: Function App ID.
    :type function_app_id: str
    :param script_root_path_href: Script root path URI.
    :type script_root_path_href: str
    :param script_href: Script URI.
    :type script_href: str
    :param config_href: Config URI.
    :type config_href: str
    :param secrets_file_href: Secrets file URI.
    :type secrets_file_href: str
    :param href: Function URI.
    :type href: str
    :param config: Config information.
    :type config: object
    :param files: File list.
    :type files: dict[str, str]
    :param test_data: Test data used when testing via the Azure Portal.
    :type test_data: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'function_app_id': {'key': 'properties.function_app_id', 'type': 'str'},
        'script_root_path_href': {'key': 'properties.script_root_path_href', 'type': 'str'},
        'script_href': {'key': 'properties.script_href', 'type': 'str'},
        'config_href': {'key': 'properties.config_href', 'type': 'str'},
        'secrets_file_href': {'key': 'properties.secrets_file_href', 'type': 'str'},
        'href': {'key': 'properties.href', 'type': 'str'},
        'config': {'key': 'properties.config', 'type': 'object'},
        'files': {'key': 'properties.files', 'type': '{str}'},
        'test_data': {'key': 'properties.test_data', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(FunctionEnvelope, self).__init__(**kwargs)
        # id/name/kind/type are handled by ProxyOnlyResource; only the
        # function-specific properties are set here.
        for name in ('function_app_id', 'script_root_path_href',
                     'script_href', 'config_href', 'secrets_file_href',
                     'href', 'config', 'files', 'test_data'):
            setattr(self, name, kwargs.get(name))
class FunctionSecrets(ProxyOnlyResource):
    """Secrets for invoking a function.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param key: Secret key.
    :type key: str
    :param trigger_url: Trigger URL.
    :type trigger_url: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'key': {'key': 'properties.key', 'type': 'str'},
        'trigger_url': {'key': 'properties.trigger_url', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(FunctionSecrets, self).__init__(**kwargs)
        self.key = kwargs.get('key')
        self.trigger_url = kwargs.get('trigger_url')
class GeoDistribution(Model):
    """A single location in a global distribution definition.

    :param location: Location.
    :type location: str
    :param number_of_workers: Number of workers at that location.
    :type number_of_workers: int
    """

    _attribute_map = {
        'location': {'key': 'location', 'type': 'str'},
        'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        super(GeoDistribution, self).__init__(**kwargs)
        for name in ('location', 'number_of_workers'):
            setattr(self, name, kwargs.get(name))
class GeoRegion(ProxyOnlyResource):
    """A geographical region.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar description: Region description.
    :vartype description: str
    :ivar display_name: Display name for the region.
    :vartype display_name: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'description': {'readonly': True},
        'display_name': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'display_name': {'key': 'properties.displayName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(GeoRegion, self).__init__(**kwargs)
        # Both region-specific properties are server-populated (read-only).
        self.description = None
        self.display_name = None
class GlobalCsmSkuDescription(Model):
    """A global SKU description.

    :param name: Name of the resource SKU.
    :type name: str
    :param tier: Service tier of the resource SKU.
    :type tier: str
    :param size: Size specifier of the resource SKU.
    :type size: str
    :param family: Family code of the resource SKU.
    :type family: str
    :param capacity: Min, max, and default scale values of the SKU.
    :type capacity: ~azure.mgmt.web.models.SkuCapacity
    :param locations: Locations of the SKU.
    :type locations: list[str]
    :param capabilities: Capabilities of the SKU, e.g., is traffic manager
     enabled?
    :type capabilities: list[~azure.mgmt.web.models.Capability]
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'tier': {'key': 'tier', 'type': 'str'},
        'size': {'key': 'size', 'type': 'str'},
        'family': {'key': 'family', 'type': 'str'},
        'capacity': {'key': 'capacity', 'type': 'SkuCapacity'},
        'locations': {'key': 'locations', 'type': '[str]'},
        'capabilities': {'key': 'capabilities', 'type': '[Capability]'},
    }

    def __init__(self, **kwargs):
        super(GlobalCsmSkuDescription, self).__init__(**kwargs)
        for attr in ('name', 'tier', 'size', 'family', 'capacity',
                     'locations', 'capabilities'):
            setattr(self, attr, kwargs.get(attr))
class HandlerMapping(Model):
    """An IIS handler mapping: routes HTTP requests with a given extension to
    a FastCGI handler process.

    For example, it can configure php-cgi.exe to handle all HTTP requests
    with the *.php extension.

    :param extension: Requests with this extension are handled by the
     specified FastCGI application.
    :type extension: str
    :param script_processor: Absolute path to the FastCGI application.
    :type script_processor: str
    :param arguments: Command-line arguments passed to the script processor.
    :type arguments: str
    """

    _attribute_map = {
        'extension': {'key': 'extension', 'type': 'str'},
        'script_processor': {'key': 'scriptProcessor', 'type': 'str'},
        'arguments': {'key': 'arguments', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(HandlerMapping, self).__init__(**kwargs)
        for attr in ('extension', 'script_processor', 'arguments'):
            setattr(self, attr, kwargs.get(attr))
class HostingEnvironmentDeploymentInfo(Model):
    """Information needed to create resources on an App Service Environment.

    :param name: Name of the App Service Environment.
    :type name: str
    :param location: Location of the App Service Environment.
    :type location: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(HostingEnvironmentDeploymentInfo, self).__init__(**kwargs)
        self.name = kwargs.get('name')
        self.location = kwargs.get('location')
class HostingEnvironmentDiagnostics(Model):
    """Diagnostics for an App Service Environment.

    :param name: Name/identifier of the diagnostics.
    :type name: str
    :param diagnosics_output: Diagnostics output. (Spelling follows the
     service contract.)
    :type diagnosics_output: str
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'diagnosics_output': {'key': 'diagnosicsOutput', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(HostingEnvironmentDiagnostics, self).__init__(**kwargs)
        self.name = kwargs.get('name')
        self.diagnosics_output = kwargs.get('diagnosics_output')
class HostingEnvironmentProfile(Model):
    """Identifies the App Service Environment a resource should use.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param id: Resource ID of the App Service Environment.
    :type id: str
    :ivar name: Name of the App Service Environment.
    :vartype name: str
    :ivar type: Resource type of the App Service Environment.
    :vartype type: str
    """

    _validation = {
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(HostingEnvironmentProfile, self).__init__(**kwargs)
        # Only the resource ID is client-settable; name/type come from the
        # server.
        self.id = kwargs.get('id')
        self.name = None
        self.type = None
class HostName(Model):
    """Details of a hostname derived from a domain.

    :param name: Name of the hostname.
    :type name: str
    :param site_names: Apps the hostname is assigned to. Contains more than
     one app only when the hostname points at a Traffic Manager.
    :type site_names: list[str]
    :param azure_resource_name: Name of the Azure resource the hostname is
     assigned to: the Traffic Manager name when assigned to one, otherwise
     the app name.
    :type azure_resource_name: str
    :param azure_resource_type: Type of the Azure resource the hostname is
     assigned to. Possible values include: 'Website', 'TrafficManager'
    :type azure_resource_type: str or ~azure.mgmt.web.models.AzureResourceType
    :param custom_host_name_dns_record_type: Type of the DNS record.
     Possible values include: 'CName', 'A'
    :type custom_host_name_dns_record_type: str or
     ~azure.mgmt.web.models.CustomHostNameDnsRecordType
    :param host_name_type: Type of the hostname. Possible values include:
     'Verified', 'Managed'
    :type host_name_type: str or ~azure.mgmt.web.models.HostNameType
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'site_names': {'key': 'siteNames', 'type': '[str]'},
        'azure_resource_name': {'key': 'azureResourceName', 'type': 'str'},
        'azure_resource_type': {'key': 'azureResourceType', 'type': 'AzureResourceType'},
        'custom_host_name_dns_record_type': {'key': 'customHostNameDnsRecordType', 'type': 'CustomHostNameDnsRecordType'},
        'host_name_type': {'key': 'hostNameType', 'type': 'HostNameType'},
    }

    def __init__(self, **kwargs):
        super(HostName, self).__init__(**kwargs)
        for attr in ('name', 'site_names', 'azure_resource_name',
                     'azure_resource_type',
                     'custom_host_name_dns_record_type', 'host_name_type'):
            setattr(self, attr, kwargs.get(attr))
class HostNameBinding(ProxyOnlyResource):
    """A hostname binding.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param site_name: App Service app name.
    :type site_name: str
    :param domain_id: Fully qualified ARM domain resource URI.
    :type domain_id: str
    :param azure_resource_name: Azure resource name.
    :type azure_resource_name: str
    :param azure_resource_type: Azure resource type. Possible values
     include: 'Website', 'TrafficManager'
    :type azure_resource_type: str or ~azure.mgmt.web.models.AzureResourceType
    :param custom_host_name_dns_record_type: Custom DNS record type.
     Possible values include: 'CName', 'A'
    :type custom_host_name_dns_record_type: str or
     ~azure.mgmt.web.models.CustomHostNameDnsRecordType
    :param host_name_type: Hostname type. Possible values include:
     'Verified', 'Managed'
    :type host_name_type: str or ~azure.mgmt.web.models.HostNameType
    :param ssl_state: SSL type. Possible values include: 'Disabled',
     'SniEnabled', 'IpBasedEnabled'
    :type ssl_state: str or ~azure.mgmt.web.models.SslState
    :param thumbprint: SSL certificate thumbprint
    :type thumbprint: str
    :ivar virtual_ip: Virtual IP address assigned to the hostname if IP
     based SSL is enabled.
    :vartype virtual_ip: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'virtual_ip': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'site_name': {'key': 'properties.siteName', 'type': 'str'},
        'domain_id': {'key': 'properties.domainId', 'type': 'str'},
        'azure_resource_name': {'key': 'properties.azureResourceName', 'type': 'str'},
        'azure_resource_type': {'key': 'properties.azureResourceType', 'type': 'AzureResourceType'},
        'custom_host_name_dns_record_type': {'key': 'properties.customHostNameDnsRecordType', 'type': 'CustomHostNameDnsRecordType'},
        'host_name_type': {'key': 'properties.hostNameType', 'type': 'HostNameType'},
        'ssl_state': {'key': 'properties.sslState', 'type': 'SslState'},
        'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
        'virtual_ip': {'key': 'properties.virtualIP', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(HostNameBinding, self).__init__(**kwargs)
        # Writable binding properties.
        for attr in ('site_name', 'domain_id', 'azure_resource_name',
                     'azure_resource_type',
                     'custom_host_name_dns_record_type', 'host_name_type',
                     'ssl_state', 'thumbprint'):
            setattr(self, attr, kwargs.get(attr))
        # virtual_ip is server-populated.
        self.virtual_ip = None
class HostNameSslState(Model):
    """SSL state of a hostname.

    :param name: Hostname.
    :type name: str
    :param ssl_state: SSL type. Possible values include: 'Disabled',
     'SniEnabled', 'IpBasedEnabled'
    :type ssl_state: str or ~azure.mgmt.web.models.SslState
    :param virtual_ip: Virtual IP address assigned to the hostname if IP
     based SSL is enabled.
    :type virtual_ip: str
    :param thumbprint: SSL certificate thumbprint.
    :type thumbprint: str
    :param to_update: Set to <code>true</code> to update an existing
     hostname.
    :type to_update: bool
    :param host_type: Indicates whether the hostname is a standard or
     repository hostname. Possible values include: 'Standard', 'Repository'
    :type host_type: str or ~azure.mgmt.web.models.HostType
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'ssl_state': {'key': 'sslState', 'type': 'SslState'},
        'virtual_ip': {'key': 'virtualIP', 'type': 'str'},
        'thumbprint': {'key': 'thumbprint', 'type': 'str'},
        'to_update': {'key': 'toUpdate', 'type': 'bool'},
        'host_type': {'key': 'hostType', 'type': 'HostType'},
    }

    def __init__(self, **kwargs):
        super(HostNameSslState, self).__init__(**kwargs)
        for attr in ('name', 'ssl_state', 'virtual_ip', 'thumbprint',
                     'to_update', 'host_type'):
            setattr(self, attr, kwargs.get(attr))
class HttpLogsConfig(Model):
    """HTTP logs configuration.

    :param file_system: HTTP logs to file system configuration.
    :type file_system: ~azure.mgmt.web.models.FileSystemHttpLogsConfig
    :param azure_blob_storage: HTTP logs to Azure Blob Storage
     configuration.
    :type azure_blob_storage:
     ~azure.mgmt.web.models.AzureBlobStorageHttpLogsConfig
    """

    _attribute_map = {
        'file_system': {'key': 'fileSystem', 'type': 'FileSystemHttpLogsConfig'},
        'azure_blob_storage': {'key': 'azureBlobStorage', 'type': 'AzureBlobStorageHttpLogsConfig'},
    }

    def __init__(self, **kwargs):
        super(HttpLogsConfig, self).__init__(**kwargs)
        self.file_system = kwargs.get('file_system')
        self.azure_blob_storage = kwargs.get('azure_blob_storage')
class HybridConnection(ProxyOnlyResource):
    """Hybrid Connection contract, used to configure a Hybrid Connection.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param service_bus_namespace: Name of the Service Bus namespace.
    :type service_bus_namespace: str
    :param relay_name: Name of the Service Bus relay.
    :type relay_name: str
    :param relay_arm_uri: ARM URI to the Service Bus relay.
    :type relay_arm_uri: str
    :param hostname: Hostname of the endpoint.
    :type hostname: str
    :param port: Port of the endpoint.
    :type port: int
    :param send_key_name: Name of the Service Bus key with Send
     permissions, used to authenticate to Service Bus.
    :type send_key_name: str
    :param send_key_value: Value of the Service Bus key, used to
     authenticate to Service Bus. In ARM this key is not returned
     normally; use the POST /listKeys API instead.
    :type send_key_value: str
    :param service_bus_suffix: Suffix for the Service Bus endpoint. By
     default this is .servicebus.windows.net
    :type service_bus_suffix: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'},
        'relay_name': {'key': 'properties.relayName', 'type': 'str'},
        'relay_arm_uri': {'key': 'properties.relayArmUri', 'type': 'str'},
        'hostname': {'key': 'properties.hostname', 'type': 'str'},
        'port': {'key': 'properties.port', 'type': 'int'},
        'send_key_name': {'key': 'properties.sendKeyName', 'type': 'str'},
        'send_key_value': {'key': 'properties.sendKeyValue', 'type': 'str'},
        'service_bus_suffix': {'key': 'properties.serviceBusSuffix', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(HybridConnection, self).__init__(**kwargs)
        for attr in ('service_bus_namespace', 'relay_name', 'relay_arm_uri',
                     'hostname', 'port', 'send_key_name', 'send_key_value',
                     'service_bus_suffix'):
            setattr(self, attr, kwargs.get(attr))
class HybridConnectionKey(ProxyOnlyResource):
    """Hybrid Connection key contract: the send key name and value for a
    Hybrid Connection.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar send_key_name: Name of the send key.
    :vartype send_key_name: str
    :ivar send_key_value: Value of the send key.
    :vartype send_key_value: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'send_key_name': {'readonly': True},
        'send_key_value': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'send_key_name': {'key': 'properties.sendKeyName', 'type': 'str'},
        'send_key_value': {'key': 'properties.sendKeyValue', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(HybridConnectionKey, self).__init__(**kwargs)
        # Both key properties are server-populated (read-only).
        self.send_key_name = None
        self.send_key_value = None
class HybridConnectionLimits(ProxyOnlyResource):
    """Hybrid Connection limits contract: returns the plan limits of Hybrid
    Connections.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar current: Current number of Hybrid Connections.
    :vartype current: int
    :ivar maximum: Maximum number of Hybrid Connections allowed.
    :vartype maximum: int
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'current': {'readonly': True},
        'maximum': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'current': {'key': 'properties.current', 'type': 'int'},
        'maximum': {'key': 'properties.maximum', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        super(HybridConnectionLimits, self).__init__(**kwargs)
        # Both limit properties are server-populated (read-only).
        self.current = None
        self.maximum = None
class Identifier(ProxyOnlyResource):
    """A domain-specific resource identifier.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param identifier_id: String representation of the identity.
    :type identifier_id: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'identifier_id': {'key': 'properties.id', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(Identifier, self).__init__(**kwargs)
        self.identifier_id = kwargs.get('identifier_id')
class InboundEnvironmentEndpoint(Model):
    """IP addresses and ports that require inbound network access to and
    within the subnet of the App Service Environment.

    :param description: Short text describing the purpose of the network
     traffic.
    :type description: str
    :param endpoints: IP addresses the network traffic originates from, in
     CIDR notation.
    :type endpoints: list[str]
    :param ports: Ports the network traffic arrives to the App Service
     Environment at.
    :type ports: list[str]
    """

    _attribute_map = {
        'description': {'key': 'description', 'type': 'str'},
        'endpoints': {'key': 'endpoints', 'type': '[str]'},
        'ports': {'key': 'ports', 'type': '[str]'},
    }

    def __init__(self, **kwargs):
        super(InboundEnvironmentEndpoint, self).__init__(**kwargs)
        for attr in ('description', 'endpoints', 'ports'):
            setattr(self, attr, kwargs.get(attr))
class IpSecurityRestriction(Model):
    """An IP security restriction on an app.

    :param ip_address: IP address the restriction is valid for. Either a
     pure IPv4 address (SubnetMask property required) or CIDR notation such
     as ipv4/mask (leading-bit match); for CIDR, SubnetMask must not be
     specified.
    :type ip_address: str
    :param subnet_mask: Subnet mask for the range of IP addresses the
     restriction is valid for.
    :type subnet_mask: str
    :param vnet_subnet_resource_id: Virtual network resource id
    :type vnet_subnet_resource_id: str
    :param vnet_traffic_tag: (internal) Vnet traffic tag
    :type vnet_traffic_tag: int
    :param subnet_traffic_tag: (internal) Subnet traffic tag
    :type subnet_traffic_tag: int
    :param action: Allow or Deny access for this IP range.
    :type action: str
    :param tag: What this IP filter is used for; supports IP filtering on
     proxies. Possible values include: 'Default', 'XffProxy'
    :type tag: str or ~azure.mgmt.web.models.IpFilterTag
    :param priority: Priority of the IP restriction rule.
    :type priority: int
    :param name: IP restriction rule name.
    :type name: str
    :param description: IP restriction rule description.
    :type description: str
    """

    _attribute_map = {
        'ip_address': {'key': 'ipAddress', 'type': 'str'},
        'subnet_mask': {'key': 'subnetMask', 'type': 'str'},
        'vnet_subnet_resource_id': {'key': 'vnetSubnetResourceId', 'type': 'str'},
        'vnet_traffic_tag': {'key': 'vnetTrafficTag', 'type': 'int'},
        'subnet_traffic_tag': {'key': 'subnetTrafficTag', 'type': 'int'},
        'action': {'key': 'action', 'type': 'str'},
        'tag': {'key': 'tag', 'type': 'IpFilterTag'},
        'priority': {'key': 'priority', 'type': 'int'},
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(IpSecurityRestriction, self).__init__(**kwargs)
        for attr in ('ip_address', 'subnet_mask', 'vnet_subnet_resource_id',
                     'vnet_traffic_tag', 'subnet_traffic_tag', 'action',
                     'tag', 'priority', 'name', 'description'):
            setattr(self, attr, kwargs.get(attr))
class LocalizableString(Model):
    """Localizable string object carrying a name and its localized value.

    :param value: Non-localized name.
    :type value: str
    :param localized_value: Localized name.
    :type localized_value: str
    """

    _attribute_map = {
        "value": {"key": "value", "type": "str"},
        "localized_value": {"key": "localizedValue", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(LocalizableString, self).__init__(**kwargs)
        self.value = kwargs.get("value")
        self.localized_value = kwargs.get("localized_value")
class LogSpecification(Model):
    """Log definition of a single resource metric.

    :param name:
    :type name: str
    :param display_name:
    :type display_name: str
    :param blob_duration:
    :type blob_duration: str
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "display_name": {"key": "displayName", "type": "str"},
        "blob_duration": {"key": "blobDuration", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(LogSpecification, self).__init__(**kwargs)
        # All arguments are optional; missing ones default to None.
        for attr in ("name", "display_name", "blob_duration"):
            setattr(self, attr, kwargs.get(attr))
class ManagedServiceIdentity(Model):
    """Managed service identity.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param type: Type of managed service identity. Possible values include:
     'SystemAssigned', 'UserAssigned', 'SystemAssigned, UserAssigned', 'None'
    :type type: str or ~azure.mgmt.web.models.ManagedServiceIdentityType
    :ivar tenant_id: Tenant of managed service identity.
    :vartype tenant_id: str
    :ivar principal_id: Principal Id of managed service identity.
    :vartype principal_id: str
    :param user_assigned_identities: User assigned identities associated
     with the resource. Dictionary keys are ARM resource ids of the form:
     '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}
    :type user_assigned_identities: dict[str,
     ~azure.mgmt.web.models.ManagedServiceIdentityUserAssignedIdentitiesValue]
    """

    _validation = {
        "tenant_id": {"readonly": True},
        "principal_id": {"readonly": True},
    }

    _attribute_map = {
        "type": {"key": "type", "type": "str"},
        "tenant_id": {"key": "tenantId", "type": "str"},
        "principal_id": {"key": "principalId", "type": "str"},
        "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{ManagedServiceIdentityUserAssignedIdentitiesValue}"},
    }

    def __init__(self, **kwargs):
        super(ManagedServiceIdentity, self).__init__(**kwargs)
        self.type = kwargs.get("type")
        # Server-populated, read-only fields start out empty.
        self.tenant_id = None
        self.principal_id = None
        self.user_assigned_identities = kwargs.get("user_assigned_identities")
class ManagedServiceIdentityUserAssignedIdentitiesValue(Model):
    """ManagedServiceIdentityUserAssignedIdentitiesValue.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar principal_id: Principal Id of user assigned identity
    :vartype principal_id: str
    :ivar client_id: Client Id of user assigned identity
    :vartype client_id: str
    """

    _validation = {
        "principal_id": {"readonly": True},
        "client_id": {"readonly": True},
    }

    _attribute_map = {
        "principal_id": {"key": "principalId", "type": "str"},
        "client_id": {"key": "clientId", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(ManagedServiceIdentityUserAssignedIdentitiesValue, self).__init__(**kwargs)
        # Both fields are read-only and filled in by the service.
        self.principal_id = None
        self.client_id = None
class MetricAvailabilily(Model):
    """Metric availability and retention.

    NOTE: the class name misspells "Availability", but it is kept as-is
    because other models reference it by this exact name.

    :param time_grain: Time grain.
    :type time_grain: str
    :param retention: Retention period for the current time grain.
    :type retention: str
    """

    _attribute_map = {
        "time_grain": {"key": "timeGrain", "type": "str"},
        "retention": {"key": "retention", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(MetricAvailabilily, self).__init__(**kwargs)
        self.time_grain = kwargs.get("time_grain")
        self.retention = kwargs.get("retention")
class MetricAvailability(Model):
    """Retention policy of a resource metric.

    :param time_grain:
    :type time_grain: str
    :param blob_duration:
    :type blob_duration: str
    """

    _attribute_map = {
        "time_grain": {"key": "timeGrain", "type": "str"},
        "blob_duration": {"key": "blobDuration", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(MetricAvailability, self).__init__(**kwargs)
        self.time_grain = kwargs.get("time_grain")
        self.blob_duration = kwargs.get("blob_duration")
class MetricDefinition(ProxyOnlyResource):
    """Metadata for a metric.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar unit: Unit of the metric.
    :vartype unit: str
    :ivar primary_aggregation_type: Primary aggregation type.
    :vartype primary_aggregation_type: str
    :ivar metric_availabilities: Time grains supported for the metric,
     together with the retention period for each.
    :vartype metric_availabilities:
     list[~azure.mgmt.web.models.MetricAvailabilily]
    :ivar display_name: Friendly name shown in the UI.
    :vartype display_name: str
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "unit": {"readonly": True},
        "primary_aggregation_type": {"readonly": True},
        "metric_availabilities": {"readonly": True},
        "display_name": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "unit": {"key": "properties.unit", "type": "str"},
        "primary_aggregation_type": {"key": "properties.primaryAggregationType", "type": "str"},
        "metric_availabilities": {"key": "properties.metricAvailabilities", "type": "[MetricAvailabilily]"},
        "display_name": {"key": "properties.displayName", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(MetricDefinition, self).__init__(**kwargs)
        # Every metric-specific field is server-populated and read-only.
        self.unit = None
        self.primary_aggregation_type = None
        self.metric_availabilities = None
        self.display_name = None
class MetricSpecification(Model):
    """Definition of a single resource metric.

    :param name:
    :type name: str
    :param display_name:
    :type display_name: str
    :param display_description:
    :type display_description: str
    :param unit:
    :type unit: str
    :param aggregation_type:
    :type aggregation_type: str
    :param supports_instance_level_aggregation:
    :type supports_instance_level_aggregation: bool
    :param enable_regional_mdm_account:
    :type enable_regional_mdm_account: bool
    :param source_mdm_account:
    :type source_mdm_account: str
    :param source_mdm_namespace:
    :type source_mdm_namespace: str
    :param metric_filter_pattern:
    :type metric_filter_pattern: str
    :param fill_gap_with_zero:
    :type fill_gap_with_zero: bool
    :param is_internal:
    :type is_internal: bool
    :param dimensions:
    :type dimensions: list[~azure.mgmt.web.models.Dimension]
    :param category:
    :type category: str
    :param availabilities:
    :type availabilities: list[~azure.mgmt.web.models.MetricAvailability]
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "display_name": {"key": "displayName", "type": "str"},
        "display_description": {"key": "displayDescription", "type": "str"},
        "unit": {"key": "unit", "type": "str"},
        "aggregation_type": {"key": "aggregationType", "type": "str"},
        "supports_instance_level_aggregation": {"key": "supportsInstanceLevelAggregation", "type": "bool"},
        "enable_regional_mdm_account": {"key": "enableRegionalMdmAccount", "type": "bool"},
        "source_mdm_account": {"key": "sourceMdmAccount", "type": "str"},
        "source_mdm_namespace": {"key": "sourceMdmNamespace", "type": "str"},
        "metric_filter_pattern": {"key": "metricFilterPattern", "type": "str"},
        "fill_gap_with_zero": {"key": "fillGapWithZero", "type": "bool"},
        "is_internal": {"key": "isInternal", "type": "bool"},
        "dimensions": {"key": "dimensions", "type": "[Dimension]"},
        "category": {"key": "category", "type": "str"},
        "availabilities": {"key": "availabilities", "type": "[MetricAvailability]"},
    }

    def __init__(self, **kwargs):
        super(MetricSpecification, self).__init__(**kwargs)
        # Every constructor argument is optional; absent ones become None.
        for attr in ("name", "display_name", "display_description", "unit",
                     "aggregation_type", "supports_instance_level_aggregation",
                     "enable_regional_mdm_account", "source_mdm_account",
                     "source_mdm_namespace", "metric_filter_pattern",
                     "fill_gap_with_zero", "is_internal", "dimensions",
                     "category", "availabilities"):
            setattr(self, attr, kwargs.get(attr))
class MigrateMySqlRequest(ProxyOnlyResource):
    """MySQL migration request.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param connection_string: Required. Connection string to the remote MySQL
     database.
    :type connection_string: str
    :param migration_type: Required. The type of migration operation to be
     done. Possible values include: 'LocalToRemote', 'RemoteToLocal'
    :type migration_type: str or ~azure.mgmt.web.models.MySqlMigrationType
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "connection_string": {"required": True},
        "migration_type": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "connection_string": {"key": "properties.connectionString", "type": "str"},
        "migration_type": {"key": "properties.migrationType", "type": "MySqlMigrationType"},
    }

    def __init__(self, **kwargs):
        super(MigrateMySqlRequest, self).__init__(**kwargs)
        # Required by the service, but validation happens at serialization
        # time rather than here, so missing values default to None.
        self.connection_string = kwargs.get("connection_string")
        self.migration_type = kwargs.get("migration_type")
class MigrateMySqlStatus(ProxyOnlyResource):
    """MySQL migration status.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar migration_operation_status: Status of the migration task. Possible
     values include: 'InProgress', 'Failed', 'Succeeded', 'TimedOut', 'Created'
    :vartype migration_operation_status: str or
     ~azure.mgmt.web.models.OperationStatus
    :ivar operation_id: Operation ID for the migration task.
    :vartype operation_id: str
    :ivar local_my_sql_enabled: True if the web app has in app MySql enabled
    :vartype local_my_sql_enabled: bool
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "migration_operation_status": {"readonly": True},
        "operation_id": {"readonly": True},
        "local_my_sql_enabled": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "migration_operation_status": {"key": "properties.migrationOperationStatus", "type": "OperationStatus"},
        "operation_id": {"key": "properties.operationId", "type": "str"},
        "local_my_sql_enabled": {"key": "properties.localMySqlEnabled", "type": "bool"},
    }

    def __init__(self, **kwargs):
        super(MigrateMySqlStatus, self).__init__(**kwargs)
        # All status fields are server-populated and read-only.
        self.migration_operation_status = None
        self.operation_id = None
        self.local_my_sql_enabled = None
class MSDeploy(ProxyOnlyResource):
    """MSDeploy ARM PUT information.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param package_uri: Package URI
    :type package_uri: str
    :param connection_string: SQL Connection String
    :type connection_string: str
    :param db_type: Database Type
    :type db_type: str
    :param set_parameters_xml_file_uri: URI of MSDeploy Parameters file. Must
     not be set if SetParameters is used.
    :type set_parameters_xml_file_uri: str
    :param set_parameters: MSDeploy Parameters. Must not be set if
     SetParametersXmlFileUri is used.
    :type set_parameters: dict[str, str]
    :param skip_app_data: Controls whether the MSDeploy operation skips the
     App_Data directory. When <code>true</code>, the existing App_Data
     directory on the destination is not deleted and any App_Data directory
     in the source is ignored. Defaults to <code>false</code>.
    :type skip_app_data: bool
    :param app_offline: Sets the AppOffline rule while the MSDeploy operation
     executes. Defaults to <code>false</code>.
    :type app_offline: bool
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "package_uri": {"key": "properties.packageUri", "type": "str"},
        "connection_string": {"key": "properties.connectionString", "type": "str"},
        "db_type": {"key": "properties.dbType", "type": "str"},
        "set_parameters_xml_file_uri": {"key": "properties.setParametersXmlFileUri", "type": "str"},
        "set_parameters": {"key": "properties.setParameters", "type": "{str}"},
        "skip_app_data": {"key": "properties.skipAppData", "type": "bool"},
        "app_offline": {"key": "properties.appOffline", "type": "bool"},
    }

    def __init__(self, **kwargs):
        super(MSDeploy, self).__init__(**kwargs)
        # Optional deployment settings; missing arguments default to None.
        for attr in ("package_uri", "connection_string", "db_type",
                     "set_parameters_xml_file_uri", "set_parameters",
                     "skip_app_data", "app_offline"):
            setattr(self, attr, kwargs.get(attr))
class MSDeployLog(ProxyOnlyResource):
    """MSDeploy log.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar entries: List of log entry messages
    :vartype entries: list[~azure.mgmt.web.models.MSDeployLogEntry]
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "entries": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "entries": {"key": "properties.entries", "type": "[MSDeployLogEntry]"},
    }

    def __init__(self, **kwargs):
        super(MSDeployLog, self).__init__(**kwargs)
        # Log entries are server-populated and read-only.
        self.entries = None
class MSDeployLogEntry(Model):
    """MSDeploy log entry.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar time: Timestamp of log entry
    :vartype time: datetime
    :ivar type: Log entry type. Possible values include: 'Message', 'Warning',
     'Error'
    :vartype type: str or ~azure.mgmt.web.models.MSDeployLogEntryType
    :ivar message: Log entry message
    :vartype message: str
    """

    _validation = {
        "time": {"readonly": True},
        "type": {"readonly": True},
        "message": {"readonly": True},
    }

    _attribute_map = {
        "time": {"key": "time", "type": "iso-8601"},
        "type": {"key": "type", "type": "MSDeployLogEntryType"},
        "message": {"key": "message", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(MSDeployLogEntry, self).__init__(**kwargs)
        # Every field is server-populated and read-only.
        self.time = None
        self.type = None
        self.message = None
class MSDeployStatus(ProxyOnlyResource):
    """MSDeploy ARM response.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar deployer: Username of deployer
    :vartype deployer: str
    :ivar provisioning_state: Provisioning state. Possible values include:
     'accepted', 'running', 'succeeded', 'failed', 'canceled'
    :vartype provisioning_state: str or
     ~azure.mgmt.web.models.MSDeployProvisioningState
    :ivar start_time: Start time of deploy operation
    :vartype start_time: datetime
    :ivar end_time: End time of deploy operation
    :vartype end_time: datetime
    :ivar complete: Whether the deployment operation has completed
    :vartype complete: bool
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "deployer": {"readonly": True},
        "provisioning_state": {"readonly": True},
        "start_time": {"readonly": True},
        "end_time": {"readonly": True},
        "complete": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "deployer": {"key": "properties.deployer", "type": "str"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "MSDeployProvisioningState"},
        "start_time": {"key": "properties.startTime", "type": "iso-8601"},
        "end_time": {"key": "properties.endTime", "type": "iso-8601"},
        "complete": {"key": "properties.complete", "type": "bool"},
    }

    def __init__(self, **kwargs):
        super(MSDeployStatus, self).__init__(**kwargs)
        # Every status field is server-populated and read-only.
        self.deployer = None
        self.provisioning_state = None
        self.start_time = None
        self.end_time = None
        self.complete = None
class NameIdentifier(Model):
    """Identifies an object by name.

    :param name: Name of the object.
    :type name: str
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(NameIdentifier, self).__init__(**kwargs)
        self.name = kwargs.get("name")
class NameValuePair(Model):
    """A single name/value pair.

    :param name: Pair name.
    :type name: str
    :param value: Pair value.
    :type value: str
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "value": {"key": "value", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(NameValuePair, self).__init__(**kwargs)
        self.name = kwargs.get("name")
        self.value = kwargs.get("value")
class NetworkAccessControlEntry(Model):
    """Network access control entry.

    :param action: Action object. Possible values include: 'Permit', 'Deny'
    :type action: str or ~azure.mgmt.web.models.AccessControlEntryAction
    :param description: Description of network access control entry.
    :type description: str
    :param order: Order of precedence.
    :type order: int
    :param remote_subnet: Remote subnet.
    :type remote_subnet: str
    """

    _attribute_map = {
        "action": {"key": "action", "type": "AccessControlEntryAction"},
        "description": {"key": "description", "type": "str"},
        "order": {"key": "order", "type": "int"},
        "remote_subnet": {"key": "remoteSubnet", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(NetworkAccessControlEntry, self).__init__(**kwargs)
        # All arguments are optional; missing ones default to None.
        for attr in ("action", "description", "order", "remote_subnet"):
            setattr(self, attr, kwargs.get(attr))
class NetworkFeatures(ProxyOnlyResource):
    """Full view of the network features of an app (presently VNET
    integration and Hybrid Connections).

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar virtual_network_name: The Virtual Network name.
    :vartype virtual_network_name: str
    :ivar virtual_network_connection: The Virtual Network summary view.
    :vartype virtual_network_connection: ~azure.mgmt.web.models.VnetInfo
    :ivar hybrid_connections: The Hybrid Connections summary view.
    :vartype hybrid_connections:
     list[~azure.mgmt.web.models.RelayServiceConnectionEntity]
    :ivar hybrid_connections_v2: The Hybrid Connection V2 (Service Bus) view.
    :vartype hybrid_connections_v2:
     list[~azure.mgmt.web.models.HybridConnection]
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "virtual_network_name": {"readonly": True},
        "virtual_network_connection": {"readonly": True},
        "hybrid_connections": {"readonly": True},
        "hybrid_connections_v2": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "virtual_network_name": {"key": "properties.virtualNetworkName", "type": "str"},
        "virtual_network_connection": {"key": "properties.virtualNetworkConnection", "type": "VnetInfo"},
        "hybrid_connections": {"key": "properties.hybridConnections", "type": "[RelayServiceConnectionEntity]"},
        "hybrid_connections_v2": {"key": "properties.hybridConnectionsV2", "type": "[HybridConnection]"},
    }

    def __init__(self, **kwargs):
        super(NetworkFeatures, self).__init__(**kwargs)
        # Every network view is server-populated and read-only.
        self.virtual_network_name = None
        self.virtual_network_connection = None
        self.hybrid_connections = None
        self.hybrid_connections_v2 = None
class NetworkTrace(Model):
    """Network trace.

    :param path: Local file path for the captured network trace file.
    :type path: str
    :param status: Current status of the network trace operation, same as
     Operation.Status (InProgress/Succeeded/Failed).
    :type status: str
    :param message: Detailed message of a network trace operation, e.g. error
     message in case of failure.
    :type message: str
    """

    _attribute_map = {
        "path": {"key": "path", "type": "str"},
        "status": {"key": "status", "type": "str"},
        "message": {"key": "message", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(NetworkTrace, self).__init__(**kwargs)
        # All arguments are optional; missing ones default to None.
        for attr in ("path", "status", "message"):
            setattr(self, attr, kwargs.get(attr))
class Operation(Model):
    """An operation on a resource.

    :param id: Operation ID.
    :type id: str
    :param name: Operation name.
    :type name: str
    :param status: The current status of the operation. Possible values
     include: 'InProgress', 'Failed', 'Succeeded', 'TimedOut', 'Created'
    :type status: str or ~azure.mgmt.web.models.OperationStatus
    :param errors: Any errors associated with the operation.
    :type errors: list[~azure.mgmt.web.models.ErrorEntity]
    :param created_time: Time when operation has started.
    :type created_time: datetime
    :param modified_time: Time when operation has been updated.
    :type modified_time: datetime
    :param expiration_time: Time when operation will expire.
    :type expiration_time: datetime
    :param geo_master_operation_id: Applicable only for stamp operation ids.
    :type geo_master_operation_id: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "status": {"key": "status", "type": "OperationStatus"},
        "errors": {"key": "errors", "type": "[ErrorEntity]"},
        "created_time": {"key": "createdTime", "type": "iso-8601"},
        "modified_time": {"key": "modifiedTime", "type": "iso-8601"},
        "expiration_time": {"key": "expirationTime", "type": "iso-8601"},
        "geo_master_operation_id": {"key": "geoMasterOperationId", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(Operation, self).__init__(**kwargs)
        # All arguments are optional; missing ones default to None.
        for attr in ("id", "name", "status", "errors", "created_time",
                     "modified_time", "expiration_time",
                     "geo_master_operation_id"):
            setattr(self, attr, kwargs.get(attr))
class OutboundEnvironmentEndpoint(Model):
    """Endpoints accessed for a common purpose that the App Service
    Environment requires outbound network access to.

    :param category: The type of service accessed by the App Service
     Environment, e.g., Azure Storage, Azure SQL Database, and Azure Active
     Directory.
    :type category: str
    :param endpoints: The endpoints that the App Service Environment reaches
     the service at.
    :type endpoints: list[~azure.mgmt.web.models.EndpointDependency]
    """

    _attribute_map = {
        "category": {"key": "category", "type": "str"},
        "endpoints": {"key": "endpoints", "type": "[EndpointDependency]"},
    }

    def __init__(self, **kwargs):
        super(OutboundEnvironmentEndpoint, self).__init__(**kwargs)
        self.category = kwargs.get("category")
        self.endpoints = kwargs.get("endpoints")
class PerfMonResponse(Model):
    """Performance monitor API response.

    :param code: The response code.
    :type code: str
    :param message: The message.
    :type message: str
    :param data: The performance monitor counters.
    :type data: ~azure.mgmt.web.models.PerfMonSet
    """

    _attribute_map = {
        "code": {"key": "code", "type": "str"},
        "message": {"key": "message", "type": "str"},
        "data": {"key": "data", "type": "PerfMonSet"},
    }

    def __init__(self, **kwargs):
        super(PerfMonResponse, self).__init__(**kwargs)
        self.code = kwargs.get("code")
        self.message = kwargs.get("message")
        self.data = kwargs.get("data")
class PerfMonSample(Model):
    """One performance-monitor sample within a set.

    :param time: Point in time for which counter was measured.
    :type time: datetime
    :param instance_name: Name of the server on which the measurement is made.
    :type instance_name: str
    :param value: Value of counter at a certain time.
    :type value: float
    """

    _attribute_map = {
        "time": {"key": "time", "type": "iso-8601"},
        "instance_name": {"key": "instanceName", "type": "str"},
        "value": {"key": "value", "type": "float"},
    }

    def __init__(self, **kwargs):
        super(PerfMonSample, self).__init__(**kwargs)
        self.time = kwargs.get("time")
        self.instance_name = kwargs.get("instance_name")
        self.value = kwargs.get("value")
class PerfMonSet(Model):
    """Metric information.

    :param name: Unique key name of the counter.
    :type name: str
    :param start_time: Start time of the period.
    :type start_time: datetime
    :param end_time: End time of the period.
    :type end_time: datetime
    :param time_grain: Presented time grain.
    :type time_grain: str
    :param values: Collection of workers that are active during this time.
    :type values: list[~azure.mgmt.web.models.PerfMonSample]
    """

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "start_time": {"key": "startTime", "type": "iso-8601"},
        "end_time": {"key": "endTime", "type": "iso-8601"},
        "time_grain": {"key": "timeGrain", "type": "str"},
        "values": {"key": "values", "type": "[PerfMonSample]"},
    }

    def __init__(self, **kwargs):
        super(PerfMonSet, self).__init__(**kwargs)
        # All arguments are optional; missing ones default to None.
        for attr in ("name", "start_time", "end_time", "time_grain", "values"):
            setattr(self, attr, kwargs.get(attr))
class PremierAddOn(Resource):
    """Premier add-on.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :param location: Required. Resource Location.
    :type location: str
    :ivar type: Resource type.
    :vartype type: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param sku: Premier add on SKU.
    :type sku: str
    :param product: Premier add on Product.
    :type product: str
    :param vendor: Premier add on Vendor.
    :type vendor: str
    :param marketplace_publisher: Premier add on Marketplace publisher.
    :type marketplace_publisher: str
    :param marketplace_offer: Premier add on Marketplace offer.
    :type marketplace_offer: str
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "location": {"required": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "sku": {"key": "properties.sku", "type": "str"},
        "product": {"key": "properties.product", "type": "str"},
        "vendor": {"key": "properties.vendor", "type": "str"},
        "marketplace_publisher": {"key": "properties.marketplacePublisher", "type": "str"},
        "marketplace_offer": {"key": "properties.marketplaceOffer", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(PremierAddOn, self).__init__(**kwargs)
        # Add-on-specific settings; missing arguments default to None.
        for attr in ("sku", "product", "vendor", "marketplace_publisher",
                     "marketplace_offer"):
            setattr(self, attr, kwargs.get(attr))
class PremierAddOnOffer(ProxyOnlyResource):
    """Premier add-on offer.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param sku: Premier add on SKU.
    :type sku: str
    :param product: Premier add on offer Product.
    :type product: str
    :param vendor: Premier add on offer Vendor.
    :type vendor: str
    :param promo_code_required: <code>true</code> if promotion code is
     required; otherwise, <code>false</code>.
    :type promo_code_required: bool
    :param quota: Premier add on offer Quota.
    :type quota: int
    :param web_hosting_plan_restrictions: App Service plans this offer is
     restricted to. Possible values include: 'None', 'Free', 'Shared', 'Basic',
     'Standard', 'Premium'
    :type web_hosting_plan_restrictions: str or
     ~azure.mgmt.web.models.AppServicePlanRestrictions
    :param privacy_policy_url: Privacy policy URL.
    :type privacy_policy_url: str
    :param legal_terms_url: Legal terms URL.
    :type legal_terms_url: str
    :param marketplace_publisher: Marketplace publisher.
    :type marketplace_publisher: str
    :param marketplace_offer: Marketplace offer.
    :type marketplace_offer: str
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "sku": {"key": "properties.sku", "type": "str"},
        "product": {"key": "properties.product", "type": "str"},
        "vendor": {"key": "properties.vendor", "type": "str"},
        "promo_code_required": {"key": "properties.promoCodeRequired", "type": "bool"},
        "quota": {"key": "properties.quota", "type": "int"},
        "web_hosting_plan_restrictions": {"key": "properties.webHostingPlanRestrictions", "type": "AppServicePlanRestrictions"},
        "privacy_policy_url": {"key": "properties.privacyPolicyUrl", "type": "str"},
        "legal_terms_url": {"key": "properties.legalTermsUrl", "type": "str"},
        "marketplace_publisher": {"key": "properties.marketplacePublisher", "type": "str"},
        "marketplace_offer": {"key": "properties.marketplaceOffer", "type": "str"},
    }

    def __init__(self, **kwargs):
        super(PremierAddOnOffer, self).__init__(**kwargs)
        # Offer-specific settings; missing arguments default to None.
        for attr in ("sku", "product", "vendor", "promo_code_required",
                     "quota", "web_hosting_plan_restrictions",
                     "privacy_policy_url", "legal_terms_url",
                     "marketplace_publisher", "marketplace_offer"):
            setattr(self, attr, kwargs.get(attr))
class PremierAddOnPatchResource(ProxyOnlyResource):
    """ARM resource used to patch a PremierAddOn.

    Variables marked read-only are populated by the server and ignored on
    requests.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param sku: Premier add on SKU.
    :type sku: str
    :param product: Premier add on Product.
    :type product: str
    :param vendor: Premier add on Vendor.
    :type vendor: str
    :param marketplace_publisher: Premier add on Marketplace publisher.
    :type marketplace_publisher: str
    :param marketplace_offer: Premier add on Marketplace offer.
    :type marketplace_offer: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'sku': {'key': 'properties.sku', 'type': 'str'},
        'product': {'key': 'properties.product', 'type': 'str'},
        'vendor': {'key': 'properties.vendor', 'type': 'str'},
        'marketplace_publisher': {'key': 'properties.marketplacePublisher', 'type': 'str'},
        'marketplace_offer': {'key': 'properties.marketplaceOffer', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(PremierAddOnPatchResource, self).__init__(**kwargs)
        # All writable properties are optional and default to None.
        for prop in ('sku', 'product', 'vendor', 'marketplace_publisher',
                     'marketplace_offer'):
            setattr(self, prop, kwargs.get(prop, None))
class PrivateAccess(ProxyOnlyResource):
    """Parameters of Private Access for a Web Site.

    Variables marked read-only are populated by the server and ignored on
    requests.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param enabled: Whether private access is enabled.
    :type enabled: bool
    :param virtual_networks: The Virtual Networks (and subnets) that may
     access the site privately.
    :type virtual_networks:
     list[~azure.mgmt.web.models.PrivateAccessVirtualNetwork]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'enabled': {'key': 'properties.enabled', 'type': 'bool'},
        'virtual_networks': {'key': 'properties.virtualNetworks', 'type': '[PrivateAccessVirtualNetwork]'},
    }

    def __init__(self, **kwargs):
        super(PrivateAccess, self).__init__(**kwargs)
        # Both properties are optional; absent ones default to None.
        for prop in ('enabled', 'virtual_networks'):
            setattr(self, prop, kwargs.get(prop, None))
class PrivateAccessSubnet(Model):
    """A Virtual Network subnet that can be used for private site access.

    :param name: The name of the subnet.
    :type name: str
    :param key: The key (ID) of the subnet.
    :type key: int
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'key': {'key': 'key', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        super(PrivateAccessSubnet, self).__init__(**kwargs)
        # Both properties are optional and default to None.
        for prop in ('name', 'key'):
            setattr(self, prop, kwargs.get(prop, None))
class PrivateAccessVirtualNetwork(Model):
    """A Virtual Network that can be used for private site access.

    :param name: The name of the Virtual Network.
    :type name: str
    :param key: The key (ID) of the Virtual Network.
    :type key: int
    :param resource_id: The ARM uri of the Virtual Network
    :type resource_id: str
    :param subnets: Subnets of this Virtual Network that access is allowed
     to. An empty list (but not None) means every subnet within the Virtual
     Network is allowed.
    :type subnets: list[~azure.mgmt.web.models.PrivateAccessSubnet]
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'key': {'key': 'key', 'type': 'int'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'subnets': {'key': 'subnets', 'type': '[PrivateAccessSubnet]'},
    }

    def __init__(self, **kwargs):
        super(PrivateAccessVirtualNetwork, self).__init__(**kwargs)
        # All properties are optional and default to None.
        for prop in ('name', 'key', 'resource_id', 'subnets'):
            setattr(self, prop, kwargs.get(prop, None))
class ProcessInfo(ProxyOnlyResource):
    """Information about a process running on an App Service instance.

    Variables marked read-only are populated by the server and ignored on
    requests.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar identifier: ARM Identifier for deployment.
    :vartype identifier: int
    :param deployment_name: Deployment name.
    :type deployment_name: str
    :param href: HRef URI.
    :type href: str
    :param minidump: Minidump URI.
    :type minidump: str
    :param is_profile_running: Whether a profile is running.
    :type is_profile_running: bool
    :param is_iis_profile_running: Whether the IIS Profile is running.
    :type is_iis_profile_running: bool
    :param iis_profile_timeout_in_seconds: IIS Profile timeout, in seconds.
    :type iis_profile_timeout_in_seconds: float
    :param parent: Parent process.
    :type parent: str
    :param children: Child process list.
    :type children: list[str]
    :param threads: Thread list.
    :type threads: list[~azure.mgmt.web.models.ProcessThreadInfo]
    :param open_file_handles: List of open files.
    :type open_file_handles: list[str]
    :param modules: List of modules.
    :type modules: list[~azure.mgmt.web.models.ProcessModuleInfo]
    :param file_name: File name of this process.
    :type file_name: str
    :param command_line: Command line.
    :type command_line: str
    :param user_name: User name.
    :type user_name: str
    :param handle_count: Handle count.
    :type handle_count: int
    :param module_count: Module count.
    :type module_count: int
    :param thread_count: Thread count.
    :type thread_count: int
    :param start_time: Start time.
    :type start_time: datetime
    :param total_cpu_time: Total CPU time.
    :type total_cpu_time: str
    :param user_cpu_time: User CPU time.
    :type user_cpu_time: str
    :param privileged_cpu_time: Privileged CPU time.
    :type privileged_cpu_time: str
    :param working_set: Working set.
    :type working_set: long
    :param peak_working_set: Peak working set.
    :type peak_working_set: long
    :param private_memory: Private memory size.
    :type private_memory: long
    :param virtual_memory: Virtual memory size.
    :type virtual_memory: long
    :param peak_virtual_memory: Peak virtual memory usage.
    :type peak_virtual_memory: long
    :param paged_system_memory: Paged system memory.
    :type paged_system_memory: long
    :param non_paged_system_memory: Non-paged system memory.
    :type non_paged_system_memory: long
    :param paged_memory: Paged memory.
    :type paged_memory: long
    :param peak_paged_memory: Peak paged memory.
    :type peak_paged_memory: long
    :param time_stamp: Time stamp.
    :type time_stamp: datetime
    :param environment_variables: List of environment variables.
    :type environment_variables: dict[str, str]
    :param is_scm_site: Whether this is the SCM site.
    :type is_scm_site: bool
    :param is_webjob: Whether this is a Web Job.
    :type is_webjob: bool
    :param description: Description of process.
    :type description: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'identifier': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'identifier': {'key': 'properties.identifier', 'type': 'int'},
        'deployment_name': {'key': 'properties.deployment_name', 'type': 'str'},
        'href': {'key': 'properties.href', 'type': 'str'},
        'minidump': {'key': 'properties.minidump', 'type': 'str'},
        'is_profile_running': {'key': 'properties.is_profile_running', 'type': 'bool'},
        'is_iis_profile_running': {'key': 'properties.is_iis_profile_running', 'type': 'bool'},
        'iis_profile_timeout_in_seconds': {'key': 'properties.iis_profile_timeout_in_seconds', 'type': 'float'},
        'parent': {'key': 'properties.parent', 'type': 'str'},
        'children': {'key': 'properties.children', 'type': '[str]'},
        'threads': {'key': 'properties.threads', 'type': '[ProcessThreadInfo]'},
        'open_file_handles': {'key': 'properties.open_file_handles', 'type': '[str]'},
        'modules': {'key': 'properties.modules', 'type': '[ProcessModuleInfo]'},
        'file_name': {'key': 'properties.file_name', 'type': 'str'},
        'command_line': {'key': 'properties.command_line', 'type': 'str'},
        'user_name': {'key': 'properties.user_name', 'type': 'str'},
        'handle_count': {'key': 'properties.handle_count', 'type': 'int'},
        'module_count': {'key': 'properties.module_count', 'type': 'int'},
        'thread_count': {'key': 'properties.thread_count', 'type': 'int'},
        'start_time': {'key': 'properties.start_time', 'type': 'iso-8601'},
        'total_cpu_time': {'key': 'properties.total_cpu_time', 'type': 'str'},
        'user_cpu_time': {'key': 'properties.user_cpu_time', 'type': 'str'},
        'privileged_cpu_time': {'key': 'properties.privileged_cpu_time', 'type': 'str'},
        'working_set': {'key': 'properties.working_set', 'type': 'long'},
        'peak_working_set': {'key': 'properties.peak_working_set', 'type': 'long'},
        'private_memory': {'key': 'properties.private_memory', 'type': 'long'},
        'virtual_memory': {'key': 'properties.virtual_memory', 'type': 'long'},
        'peak_virtual_memory': {'key': 'properties.peak_virtual_memory', 'type': 'long'},
        'paged_system_memory': {'key': 'properties.paged_system_memory', 'type': 'long'},
        'non_paged_system_memory': {'key': 'properties.non_paged_system_memory', 'type': 'long'},
        'paged_memory': {'key': 'properties.paged_memory', 'type': 'long'},
        'peak_paged_memory': {'key': 'properties.peak_paged_memory', 'type': 'long'},
        'time_stamp': {'key': 'properties.time_stamp', 'type': 'iso-8601'},
        'environment_variables': {'key': 'properties.environment_variables', 'type': '{str}'},
        'is_scm_site': {'key': 'properties.is_scm_site', 'type': 'bool'},
        'is_webjob': {'key': 'properties.is_webjob', 'type': 'bool'},
        'description': {'key': 'properties.description', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ProcessInfo, self).__init__(**kwargs)
        # Read-only; populated by the service, never by the caller.
        self.identifier = None
        # Every other property is optional and defaults to None.
        for prop in ('deployment_name', 'href', 'minidump',
                     'is_profile_running', 'is_iis_profile_running',
                     'iis_profile_timeout_in_seconds', 'parent', 'children',
                     'threads', 'open_file_handles', 'modules', 'file_name',
                     'command_line', 'user_name', 'handle_count',
                     'module_count', 'thread_count', 'start_time',
                     'total_cpu_time', 'user_cpu_time', 'privileged_cpu_time',
                     'working_set', 'peak_working_set', 'private_memory',
                     'virtual_memory', 'peak_virtual_memory',
                     'paged_system_memory', 'non_paged_system_memory',
                     'paged_memory', 'peak_paged_memory', 'time_stamp',
                     'environment_variables', 'is_scm_site', 'is_webjob',
                     'description'):
            setattr(self, prop, kwargs.get(prop, None))
class ProcessModuleInfo(ProxyOnlyResource):
    """Information about a module loaded by a process.

    Variables marked read-only are populated by the server and ignored on
    requests.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param base_address: Base address; also used as the module identifier in
     the ARM resource URI.
    :type base_address: str
    :param file_name: File name.
    :type file_name: str
    :param href: HRef URI.
    :type href: str
    :param file_path: File path.
    :type file_path: str
    :param module_memory_size: Module memory size.
    :type module_memory_size: int
    :param file_version: File version.
    :type file_version: str
    :param file_description: File description.
    :type file_description: str
    :param product: Product name.
    :type product: str
    :param product_version: Product version.
    :type product_version: str
    :param is_debug: Whether this is a debug build.
    :type is_debug: bool
    :param language: Module language (locale).
    :type language: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'base_address': {'key': 'properties.base_address', 'type': 'str'},
        'file_name': {'key': 'properties.file_name', 'type': 'str'},
        'href': {'key': 'properties.href', 'type': 'str'},
        'file_path': {'key': 'properties.file_path', 'type': 'str'},
        'module_memory_size': {'key': 'properties.module_memory_size', 'type': 'int'},
        'file_version': {'key': 'properties.file_version', 'type': 'str'},
        'file_description': {'key': 'properties.file_description', 'type': 'str'},
        'product': {'key': 'properties.product', 'type': 'str'},
        'product_version': {'key': 'properties.product_version', 'type': 'str'},
        'is_debug': {'key': 'properties.is_debug', 'type': 'bool'},
        'language': {'key': 'properties.language', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ProcessModuleInfo, self).__init__(**kwargs)
        # All properties are optional and default to None.
        for prop in ('base_address', 'file_name', 'href', 'file_path',
                     'module_memory_size', 'file_version', 'file_description',
                     'product', 'product_version', 'is_debug', 'language'):
            setattr(self, prop, kwargs.get(prop, None))
class ProcessThreadInfo(ProxyOnlyResource):
    """Information about one thread of a process.

    Variables marked read-only are populated by the server and ignored on
    requests.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar identifier: Site extension ID.
    :vartype identifier: int
    :param href: HRef URI.
    :type href: str
    :param process: Process URI.
    :type process: str
    :param start_address: Start address.
    :type start_address: str
    :param current_priority: Current thread priority.
    :type current_priority: int
    :param priority_level: Thread priority level.
    :type priority_level: str
    :param base_priority: Base priority.
    :type base_priority: int
    :param start_time: Start time.
    :type start_time: datetime
    :param total_processor_time: Total processor time.
    :type total_processor_time: str
    :param user_processor_time: User processor time.
    :type user_processor_time: str
    :param priviledged_processor_time: Privileged processor time. (The
     attribute name's spelling matches the service's wire format.)
    :type priviledged_processor_time: str
    :param state: Thread state.
    :type state: str
    :param wait_reason: Wait reason.
    :type wait_reason: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'identifier': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'identifier': {'key': 'properties.identifier', 'type': 'int'},
        'href': {'key': 'properties.href', 'type': 'str'},
        'process': {'key': 'properties.process', 'type': 'str'},
        'start_address': {'key': 'properties.start_address', 'type': 'str'},
        'current_priority': {'key': 'properties.current_priority', 'type': 'int'},
        'priority_level': {'key': 'properties.priority_level', 'type': 'str'},
        'base_priority': {'key': 'properties.base_priority', 'type': 'int'},
        'start_time': {'key': 'properties.start_time', 'type': 'iso-8601'},
        'total_processor_time': {'key': 'properties.total_processor_time', 'type': 'str'},
        'user_processor_time': {'key': 'properties.user_processor_time', 'type': 'str'},
        'priviledged_processor_time': {'key': 'properties.priviledged_processor_time', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'str'},
        'wait_reason': {'key': 'properties.wait_reason', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ProcessThreadInfo, self).__init__(**kwargs)
        # Read-only; populated by the service, never by the caller.
        self.identifier = None
        # Every other property is optional and defaults to None.
        for prop in ('href', 'process', 'start_address', 'current_priority',
                     'priority_level', 'base_priority', 'start_time',
                     'total_processor_time', 'user_processor_time',
                     'priviledged_processor_time', 'state', 'wait_reason'):
            setattr(self, prop, kwargs.get(prop, None))
class PublicCertificate(ProxyOnlyResource):
    """A public certificate.

    Variables marked read-only are populated by the server and ignored on
    requests.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param blob: Public Certificate byte array
    :type blob: bytearray
    :param public_certificate_location: Public Certificate Location. Possible
     values include: 'CurrentUserMy', 'LocalMachineMy', 'Unknown'
    :type public_certificate_location: str or
     ~azure.mgmt.web.models.PublicCertificateLocation
    :ivar thumbprint: Certificate Thumbprint
    :vartype thumbprint: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'thumbprint': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'blob': {'key': 'properties.blob', 'type': 'bytearray'},
        'public_certificate_location': {'key': 'properties.publicCertificateLocation', 'type': 'PublicCertificateLocation'},
        'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(PublicCertificate, self).__init__(**kwargs)
        # Writable properties are optional and default to None.
        for prop in ('blob', 'public_certificate_location'):
            setattr(self, prop, kwargs.get(prop, None))
        # Read-only; populated by the service, never by the caller.
        self.thumbprint = None
class PushSettings(ProxyOnlyResource):
    """Push settings for the App.

    Variables marked read-only are populated by the server and ignored on
    requests. All required parameters must be populated in order to send to
    Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param is_push_enabled: Required. Whether the Push endpoint is enabled.
    :type is_push_enabled: bool
    :param tag_whitelist_json: JSON string containing the list of tags that
     are whitelisted for use by the push registration endpoint.
    :type tag_whitelist_json: str
    :param tags_requiring_auth: JSON string containing the list of tags that
     require user authentication to be used in the push registration
     endpoint. Tags can consist of alphanumeric characters and the
     following: '_', '@', '#', '.', ':', '-'. Validation should be performed
     at the PushRequestHandler.
    :type tags_requiring_auth: str
    :param dynamic_tags_json: JSON string containing the list of dynamic
     tags that will be evaluated from user claims in the push registration
     endpoint.
    :type dynamic_tags_json: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'is_push_enabled': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'is_push_enabled': {'key': 'properties.isPushEnabled', 'type': 'bool'},
        'tag_whitelist_json': {'key': 'properties.tagWhitelistJson', 'type': 'str'},
        'tags_requiring_auth': {'key': 'properties.tagsRequiringAuth', 'type': 'str'},
        'dynamic_tags_json': {'key': 'properties.dynamicTagsJson', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(PushSettings, self).__init__(**kwargs)
        # is_push_enabled is required by the service but, like the generated
        # original, is not enforced here; validation happens at send time.
        for prop in ('is_push_enabled', 'tag_whitelist_json',
                     'tags_requiring_auth', 'dynamic_tags_json'):
            setattr(self, prop, kwargs.get(prop, None))
class RampUpRule(Model):
    """Routing rule for ramp-up testing.

    Allows redirecting a static percentage of traffic to a slot, or
    gradually changing the routing percentage based on performance.

    :param action_host_name: Hostname of a slot to which the traffic will be
     redirected if decided to. E.g. myapp-stage.azurewebsites.net.
    :type action_host_name: str
    :param reroute_percentage: Percentage of the traffic which will be
     redirected to <code>ActionHostName</code>.
    :type reroute_percentage: float
    :param change_step: In auto ramp up scenario this is the step to
     add/remove from <code>ReroutePercentage</code> until it reaches
     <code>MinReroutePercentage</code> or <code>MaxReroutePercentage</code>.
     Site metrics are checked every N minutes specified in
     <code>ChangeIntervalInMinutes</code>. Custom decision algorithm can be
     provided in TiPCallback site extension which URL can be specified in
     <code>ChangeDecisionCallbackUrl</code>.
    :type change_step: float
    :param change_interval_in_minutes: Interval in minutes to reevaluate
     ReroutePercentage.
    :type change_interval_in_minutes: int
    :param min_reroute_percentage: Lower boundary above which
     ReroutePercentage will stay.
    :type min_reroute_percentage: float
    :param max_reroute_percentage: Upper boundary below which
     ReroutePercentage will stay.
    :type max_reroute_percentage: float
    :param change_decision_callback_url: Custom decision algorithm can be
     provided in TiPCallback site extension which URL can be specified. See
     TiPCallback site extension for the scaffold and contracts.
     https://www.siteextensions.net/packages/TiPCallback/
    :type change_decision_callback_url: str
    :param name: Name of the routing rule. The recommended name would be to
     point to the slot which will receive the traffic in the experiment.
    :type name: str
    """

    _attribute_map = {
        'action_host_name': {'key': 'actionHostName', 'type': 'str'},
        'reroute_percentage': {'key': 'reroutePercentage', 'type': 'float'},
        'change_step': {'key': 'changeStep', 'type': 'float'},
        'change_interval_in_minutes': {'key': 'changeIntervalInMinutes', 'type': 'int'},
        'min_reroute_percentage': {'key': 'minReroutePercentage', 'type': 'float'},
        'max_reroute_percentage': {'key': 'maxReroutePercentage', 'type': 'float'},
        'change_decision_callback_url': {'key': 'changeDecisionCallbackUrl', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(RampUpRule, self).__init__(**kwargs)
        # All properties are optional and default to None.
        for prop in ('action_host_name', 'reroute_percentage', 'change_step',
                     'change_interval_in_minutes', 'min_reroute_percentage',
                     'max_reroute_percentage', 'change_decision_callback_url',
                     'name'):
            setattr(self, prop, kwargs.get(prop, None))
class Recommendation(ProxyOnlyResource):
    """A recommendation result generated by the recommendation engine.

    Variables marked read-only are populated by the server and ignored on
    requests.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param creation_time: Timestamp when this instance was created.
    :type creation_time: datetime
    :param recommendation_id: GUID that each recommendation object is
     associated with.
    :type recommendation_id: str
    :param resource_id: Full ARM resource ID string that this recommendation
     object is associated with.
    :type resource_id: str
    :param resource_scope: Name of a resource type this recommendation
     applies to, e.g. Subscription, ServerFarm, Site. Possible values
     include: 'ServerFarm', 'Subscription', 'WebSite'
    :type resource_scope: str or ~azure.mgmt.web.models.ResourceScopeType
    :param rule_name: Unique name of the rule.
    :type rule_name: str
    :param display_name: UI friendly name of the rule (may not be unique).
    :type display_name: str
    :param message: Recommendation text.
    :type message: str
    :param level: How critical this recommendation's impact is. Possible
     values include: 'Critical', 'Warning', 'Information',
     'NonUrgentSuggestion'
    :type level: str or ~azure.mgmt.web.models.NotificationLevel
    :param channels: Channels that this recommendation can apply to.
     Possible values include: 'Notification', 'Api', 'Email', 'Webhook',
     'All'
    :type channels: str or ~azure.mgmt.web.models.Channels
    :ivar category_tags: Category tags that this recommendation belongs to.
    :vartype category_tags: list[str]
    :param action_name: Name of action recommended by this object.
    :type action_name: str
    :param enabled: True if this recommendation is still valid (i.e.
     "actionable"). False if it is invalid.
    :type enabled: int
    :param states: States of this recommendation; None should be treated as
     "Active".
    :type states: list[str]
    :param start_time: Beginning time in UTC of the range that the
     recommendation refers to.
    :type start_time: datetime
    :param end_time: End time in UTC of the range that the recommendation
     refers to.
    :type end_time: datetime
    :param next_notification_time: When to notify this recommendation next,
     in UTC; None means it will never be notified again.
    :type next_notification_time: datetime
    :param notification_expiration_time: Date and time in UTC when this
     notification expires.
    :type notification_expiration_time: datetime
    :param notified_time: Last timestamp in UTC this instance was actually
     notified; None means it hasn't been notified yet.
    :type notified_time: datetime
    :param score: A metric value measured by the rule.
    :type score: float
    :param is_dynamic: True if this is associated with a dynamically added
     rule.
    :type is_dynamic: bool
    :param extension_name: Extension name of the portal, if it exists.
    :type extension_name: str
    :param blade_name: Deep link to a blade on the portal.
    :type blade_name: str
    :param forward_link: Forward link to an external document associated
     with the rule.
    :type forward_link: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'category_tags': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
        'recommendation_id': {'key': 'properties.recommendationId', 'type': 'str'},
        'resource_id': {'key': 'properties.resourceId', 'type': 'str'},
        'resource_scope': {'key': 'properties.resourceScope', 'type': 'str'},
        'rule_name': {'key': 'properties.ruleName', 'type': 'str'},
        'display_name': {'key': 'properties.displayName', 'type': 'str'},
        'message': {'key': 'properties.message', 'type': 'str'},
        'level': {'key': 'properties.level', 'type': 'NotificationLevel'},
        'channels': {'key': 'properties.channels', 'type': 'Channels'},
        'category_tags': {'key': 'properties.categoryTags', 'type': '[str]'},
        'action_name': {'key': 'properties.actionName', 'type': 'str'},
        'enabled': {'key': 'properties.enabled', 'type': 'int'},
        'states': {'key': 'properties.states', 'type': '[str]'},
        'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'properties.endTime', 'type': 'iso-8601'},
        'next_notification_time': {'key': 'properties.nextNotificationTime', 'type': 'iso-8601'},
        'notification_expiration_time': {'key': 'properties.notificationExpirationTime', 'type': 'iso-8601'},
        'notified_time': {'key': 'properties.notifiedTime', 'type': 'iso-8601'},
        'score': {'key': 'properties.score', 'type': 'float'},
        'is_dynamic': {'key': 'properties.isDynamic', 'type': 'bool'},
        'extension_name': {'key': 'properties.extensionName', 'type': 'str'},
        'blade_name': {'key': 'properties.bladeName', 'type': 'str'},
        'forward_link': {'key': 'properties.forwardLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(Recommendation, self).__init__(**kwargs)
        # Every writable property is optional and defaults to None.
        for prop in ('creation_time', 'recommendation_id', 'resource_id',
                     'resource_scope', 'rule_name', 'display_name', 'message',
                     'level', 'channels', 'action_name', 'enabled', 'states',
                     'start_time', 'end_time', 'next_notification_time',
                     'notification_expiration_time', 'notified_time', 'score',
                     'is_dynamic', 'extension_name', 'blade_name',
                     'forward_link'):
            setattr(self, prop, kwargs.get(prop, None))
        # Read-only; populated by the service, never by the caller.
        self.category_tags = None
class RecommendationRule(ProxyOnlyResource):
    """A recommendation rule that the recommendation engine can perform.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param recommendation_name: Unique name of the rule.
    :type recommendation_name: str
    :param display_name: UI friendly name of the rule.
    :type display_name: str
    :param message: Localized name of the rule (good for UI).
    :type message: str
    :param recommendation_id: Recommendation ID of an associated
     recommendation object tied to the rule, if one exists; otherwise null.
    :type recommendation_id: str
    :param description: Localized detailed description of the rule.
    :type description: str
    :param action_name: Name of the action recommended by this rule.
    :type action_name: str
    :param level: Level of impact indicating how critical this rule is.
     Possible values include: 'Critical', 'Warning', 'Information',
     'NonUrgentSuggestion'
    :type level: str or ~azure.mgmt.web.models.NotificationLevel
    :param channels: List of available channels that this rule applies to.
     Possible values include: 'Notification', 'Api', 'Email', 'Webhook', 'All'
    :type channels: str or ~azure.mgmt.web.models.Channels
    :ivar category_tags: The list of category tags this rule belongs to.
    :vartype category_tags: list[str]
    :param is_dynamic: True if associated with a dynamically added rule.
    :type is_dynamic: bool
    :param extension_name: Extension name of the portal, if any
     (dynamic rules only).
    :type extension_name: str
    :param blade_name: Deep link to a blade on the portal
     (dynamic rules only).
    :type blade_name: str
    :param forward_link: Forward link to an external document associated
     with the rule (dynamic rules only).
    :type forward_link: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'category_tags': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'recommendation_name': {'key': 'properties.recommendationName', 'type': 'str'},
        'display_name': {'key': 'properties.displayName', 'type': 'str'},
        'message': {'key': 'properties.message', 'type': 'str'},
        'recommendation_id': {'key': 'properties.recommendationId', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'action_name': {'key': 'properties.actionName', 'type': 'str'},
        'level': {'key': 'properties.level', 'type': 'NotificationLevel'},
        'channels': {'key': 'properties.channels', 'type': 'Channels'},
        'category_tags': {'key': 'properties.categoryTags', 'type': '[str]'},
        'is_dynamic': {'key': 'properties.isDynamic', 'type': 'bool'},
        'extension_name': {'key': 'properties.extensionName', 'type': 'str'},
        'blade_name': {'key': 'properties.bladeName', 'type': 'str'},
        'forward_link': {'key': 'properties.forwardLink', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(RecommendationRule, self).__init__(**kwargs)
        # Writable properties are all optional; missing kwargs become None.
        for attr in ('recommendation_name', 'display_name', 'message',
                     'recommendation_id', 'description', 'action_name',
                     'level', 'channels', 'is_dynamic', 'extension_name',
                     'blade_name', 'forward_link'):
            setattr(self, attr, kwargs.get(attr))
        # Read-only; populated by the server.
        self.category_tags = None
class ReissueCertificateOrderRequest(ProxyOnlyResource):
    """Certificate reissue request.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param key_size: Certificate key size.
    :type key_size: int
    :param delay_existing_revoke_in_hours: Delay in hours before revoking
     the existing certificate once the new certificate is issued.
    :type delay_existing_revoke_in_hours: int
    :param csr: CSR to be used for the re-key operation.
    :type csr: str
    :param is_private_key_external: Whether to change the ASC type (from
     managed private key to external private key, or vice versa).
    :type is_private_key_external: bool
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'key_size': {'key': 'properties.keySize', 'type': 'int'},
        'delay_existing_revoke_in_hours': {'key': 'properties.delayExistingRevokeInHours', 'type': 'int'},
        'csr': {'key': 'properties.csr', 'type': 'str'},
        'is_private_key_external': {'key': 'properties.isPrivateKeyExternal', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(ReissueCertificateOrderRequest, self).__init__(**kwargs)
        # All properties are optional and default to None.
        for attr in ('key_size', 'delay_existing_revoke_in_hours', 'csr',
                     'is_private_key_external'):
            setattr(self, attr, kwargs.get(attr))
class RelayServiceConnectionEntity(ProxyOnlyResource):
    """Hybrid Connection for an App Service app.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param entity_name:
    :type entity_name: str
    :param entity_connection_string:
    :type entity_connection_string: str
    :param resource_type:
    :type resource_type: str
    :param resource_connection_string:
    :type resource_connection_string: str
    :param hostname:
    :type hostname: str
    :param port:
    :type port: int
    :param biztalk_uri:
    :type biztalk_uri: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'entity_name': {'key': 'properties.entityName', 'type': 'str'},
        'entity_connection_string': {'key': 'properties.entityConnectionString', 'type': 'str'},
        'resource_type': {'key': 'properties.resourceType', 'type': 'str'},
        'resource_connection_string': {'key': 'properties.resourceConnectionString', 'type': 'str'},
        'hostname': {'key': 'properties.hostname', 'type': 'str'},
        'port': {'key': 'properties.port', 'type': 'int'},
        'biztalk_uri': {'key': 'properties.biztalkUri', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(RelayServiceConnectionEntity, self).__init__(**kwargs)
        # Every writable property is optional; absent kwargs become None.
        for attr in ('entity_name', 'entity_connection_string',
                     'resource_type', 'resource_connection_string',
                     'hostname', 'port', 'biztalk_uri'):
            setattr(self, attr, kwargs.get(attr))
class Rendering(Model):
    """Instructions for rendering the data.

    :param type: Rendering type. Possible values include: 'NoGraph',
     'Table', 'TimeSeries', 'TimeSeriesPerInstance'
    :type type: str or ~azure.mgmt.web.models.RenderingType
    :param title: Title of data.
    :type title: str
    :param description: Description of the data that will help it be
     interpreted.
    :type description: str
    """

    _attribute_map = {
        'type': {'key': 'type', 'type': 'RenderingType'},
        'title': {'key': 'title', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(Rendering, self).__init__(**kwargs)
        # All three properties are optional and default to None.
        self.type = kwargs.get('type')
        self.title = kwargs.get('title')
        self.description = kwargs.get('description')
class RenewCertificateOrderRequest(ProxyOnlyResource):
    """Certificate renew request.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param key_size: Certificate key size.
    :type key_size: int
    :param csr: CSR to be used for the re-key operation.
    :type csr: str
    :param is_private_key_external: Whether to change the ASC type (from
     managed private key to external private key, or vice versa).
    :type is_private_key_external: bool
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'key_size': {'key': 'properties.keySize', 'type': 'int'},
        'csr': {'key': 'properties.csr', 'type': 'str'},
        'is_private_key_external': {'key': 'properties.isPrivateKeyExternal', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(RenewCertificateOrderRequest, self).__init__(**kwargs)
        # All properties are optional and default to None.
        self.key_size = kwargs.get('key_size')
        self.csr = kwargs.get('csr')
        self.is_private_key_external = kwargs.get('is_private_key_external')
class RequestsBasedTrigger(Model):
    """Trigger based on total requests.

    :param count: Request count.
    :type count: int
    :param time_interval: Time interval.
    :type time_interval: str
    """

    _attribute_map = {
        'count': {'key': 'count', 'type': 'int'},
        'time_interval': {'key': 'timeInterval', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(RequestsBasedTrigger, self).__init__(**kwargs)
        # Both properties are optional and default to None.
        self.count = kwargs.get('count')
        self.time_interval = kwargs.get('time_interval')
class ResourceHealthMetadata(ProxyOnlyResource):
    """Used for getting ResourceHealthCheck settings.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param category: The category that the resource matches in the RHC
     policy file.
    :type category: str
    :param signal_availability: Whether there is a health signal for the
     resource.
    :type signal_availability: bool
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'category': {'key': 'properties.category', 'type': 'str'},
        'signal_availability': {'key': 'properties.signalAvailability', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(ResourceHealthMetadata, self).__init__(**kwargs)
        # Both properties are optional and default to None.
        self.category = kwargs.get('category')
        self.signal_availability = kwargs.get('signal_availability')
class ResourceMetric(Model):
    """A metric for any resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar name: Name of metric.
    :vartype name: ~azure.mgmt.web.models.ResourceMetricName
    :ivar unit: Metric unit.
    :vartype unit: str
    :ivar time_grain: Metric granularity, e.g. PT1H, PT5M, P1D.
    :vartype time_grain: str
    :ivar start_time: Metric start time.
    :vartype start_time: datetime
    :ivar end_time: Metric end time.
    :vartype end_time: datetime
    :ivar resource_id: Metric resource Id.
    :vartype resource_id: str
    :ivar id: Resource Id.
    :vartype id: str
    :ivar metric_values: Metric values.
    :vartype metric_values: list[~azure.mgmt.web.models.ResourceMetricValue]
    :ivar properties: Resource metric properties collection.
    :vartype properties: list[~azure.mgmt.web.models.ResourceMetricProperty]
    """

    _validation = {
        'name': {'readonly': True},
        'unit': {'readonly': True},
        'time_grain': {'readonly': True},
        'start_time': {'readonly': True},
        'end_time': {'readonly': True},
        'resource_id': {'readonly': True},
        'id': {'readonly': True},
        'metric_values': {'readonly': True},
        'properties': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'ResourceMetricName'},
        'unit': {'key': 'unit', 'type': 'str'},
        'time_grain': {'key': 'timeGrain', 'type': 'str'},
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'resource_id': {'key': 'resourceId', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
        'metric_values': {'key': 'metricValues', 'type': '[ResourceMetricValue]'},
        'properties': {'key': 'properties', 'type': '[ResourceMetricProperty]'},
    }

    def __init__(self, **kwargs):
        super(ResourceMetric, self).__init__(**kwargs)
        # Every property is read-only; all start unset and are filled in by
        # the server on deserialization.
        for attr in ('name', 'unit', 'time_grain', 'start_time', 'end_time',
                     'resource_id', 'id', 'metric_values', 'properties'):
            setattr(self, attr, None)
class ResourceMetricAvailability(Model):
    """Metrics availability and retention.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar time_grain: Time grain.
    :vartype time_grain: str
    :ivar retention: Retention period for the current time grain.
    :vartype retention: str
    """

    _validation = {
        'time_grain': {'readonly': True},
        'retention': {'readonly': True},
    }

    _attribute_map = {
        'time_grain': {'key': 'timeGrain', 'type': 'str'},
        'retention': {'key': 'retention', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ResourceMetricAvailability, self).__init__(**kwargs)
        # Both properties are read-only; the server supplies their values.
        self.time_grain = None
        self.retention = None
class ResourceMetricDefinition(ProxyOnlyResource):
    """Metadata for the metrics.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar unit: Unit of the metric.
    :vartype unit: str
    :ivar primary_aggregation_type: Primary aggregation type.
    :vartype primary_aggregation_type: str
    :ivar metric_availabilities: Time grains supported for the metric,
     together with retention periods.
    :vartype metric_availabilities:
     list[~azure.mgmt.web.models.ResourceMetricAvailability]
    :ivar resource_uri: Resource URI.
    :vartype resource_uri: str
    :ivar properties: Resource metric definition properties.
    :vartype properties: dict[str, str]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'unit': {'readonly': True},
        'primary_aggregation_type': {'readonly': True},
        'metric_availabilities': {'readonly': True},
        'resource_uri': {'readonly': True},
        'properties': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'unit': {'key': 'properties.unit', 'type': 'str'},
        'primary_aggregation_type': {'key': 'properties.primaryAggregationType', 'type': 'str'},
        'metric_availabilities': {'key': 'properties.metricAvailabilities', 'type': '[ResourceMetricAvailability]'},
        'resource_uri': {'key': 'properties.resourceUri', 'type': 'str'},
        'properties': {'key': 'properties.properties', 'type': '{str}'},
    }

    def __init__(self, **kwargs):
        super(ResourceMetricDefinition, self).__init__(**kwargs)
        # All metric-definition properties are read-only; the server
        # populates them on deserialization.
        for attr in ('unit', 'primary_aggregation_type',
                     'metric_availabilities', 'resource_uri', 'properties'):
            setattr(self, attr, None)
class ResourceMetricName(Model):
    """Name of a metric for any resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar value: Metric name value.
    :vartype value: str
    :ivar localized_value: Localized metric name value.
    :vartype localized_value: str
    """

    _validation = {
        'value': {'readonly': True},
        'localized_value': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': 'str'},
        'localized_value': {'key': 'localizedValue', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ResourceMetricName, self).__init__(**kwargs)
        # Both properties are read-only; the server supplies their values.
        self.value = None
        self.localized_value = None
class ResourceMetricProperty(Model):
    """Resource metric property.

    :param key: Key for resource metric property.
    :type key: str
    :param value: Value of pair.
    :type value: str
    """

    _attribute_map = {
        'key': {'key': 'key', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ResourceMetricProperty, self).__init__(**kwargs)
        # Both halves of the pair are optional and default to None.
        self.key = kwargs.get('key')
        self.value = kwargs.get('value')
class ResourceMetricValue(Model):
    """Value of resource metric.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar timestamp: Value timestamp.
    :vartype timestamp: str
    :ivar average: Value average.
    :vartype average: float
    :ivar minimum: Value minimum.
    :vartype minimum: float
    :ivar maximum: Value maximum.
    :vartype maximum: float
    :ivar total: Value total.
    :vartype total: float
    :ivar count: Value count.
    :vartype count: float
    :ivar properties: Resource metric properties collection.
    :vartype properties: list[~azure.mgmt.web.models.ResourceMetricProperty]
    """

    _validation = {
        'timestamp': {'readonly': True},
        'average': {'readonly': True},
        'minimum': {'readonly': True},
        'maximum': {'readonly': True},
        'total': {'readonly': True},
        'count': {'readonly': True},
        'properties': {'readonly': True},
    }

    _attribute_map = {
        'timestamp': {'key': 'timestamp', 'type': 'str'},
        'average': {'key': 'average', 'type': 'float'},
        'minimum': {'key': 'minimum', 'type': 'float'},
        'maximum': {'key': 'maximum', 'type': 'float'},
        'total': {'key': 'total', 'type': 'float'},
        'count': {'key': 'count', 'type': 'float'},
        'properties': {'key': 'properties', 'type': '[ResourceMetricProperty]'},
    }

    def __init__(self, **kwargs):
        super(ResourceMetricValue, self).__init__(**kwargs)
        # Every property is read-only; all start unset and are filled in by
        # the server on deserialization.
        for attr in ('timestamp', 'average', 'minimum', 'maximum', 'total',
                     'count', 'properties'):
            setattr(self, attr, None)
class ResourceNameAvailability(Model):
    """Information regarding availability of a resource name.

    :param name_available: <code>true</code> indicates name is valid and
     available. <code>false</code> indicates the name is invalid,
     unavailable, or both.
    :type name_available: bool
    :param reason: <code>Invalid</code> indicates the name provided does not
     match Azure App Service naming requirements. <code>AlreadyExists</code>
     indicates that the name is already in use and is therefore unavailable.
     Possible values include: 'Invalid', 'AlreadyExists'
    :type reason: str or ~azure.mgmt.web.models.InAvailabilityReasonType
    :param message: If reason == invalid, the reason why the given name is
     invalid plus the resource naming requirements so the user can select a
     valid name. If reason == AlreadyExists, explains that the resource name
     is already in use and directs the user to select a different name.
    :type message: str
    """

    _attribute_map = {
        'name_available': {'key': 'nameAvailable', 'type': 'bool'},
        'reason': {'key': 'reason', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ResourceNameAvailability, self).__init__(**kwargs)
        # All properties are optional and default to None.
        self.name_available = kwargs.get('name_available')
        self.reason = kwargs.get('reason')
        self.message = kwargs.get('message')
class ResourceNameAvailabilityRequest(Model):
    """Resource name availability request content.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. Resource name to verify.
    :type name: str
    :param type: Required. Resource type used for verification. Possible
     values include: 'Site', 'Slot', 'HostingEnvironment', 'PublishingUser',
     'Microsoft.Web/sites', 'Microsoft.Web/sites/slots',
     'Microsoft.Web/hostingEnvironments', 'Microsoft.Web/publishingUsers'
    :type type: str or ~azure.mgmt.web.models.CheckNameResourceTypes
    :param is_fqdn: Is fully qualified domain name.
    :type is_fqdn: bool
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'is_fqdn': {'key': 'isFqdn', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        super(ResourceNameAvailabilityRequest, self).__init__(**kwargs)
        # 'name' and 'type' are required for requests (enforced by
        # msrest validation, not here); 'is_fqdn' is optional.
        self.name = kwargs.get('name')
        self.type = kwargs.get('type')
        self.is_fqdn = kwargs.get('is_fqdn')
class ResponseMetaData(Model):
    """ResponseMetaData.

    :param data_source: Source of the data.
    :type data_source: ~azure.mgmt.web.models.DataSource
    """

    _attribute_map = {
        'data_source': {'key': 'dataSource', 'type': 'DataSource'},
    }

    def __init__(self, **kwargs):
        super(ResponseMetaData, self).__init__(**kwargs)
        # Optional; defaults to None when not supplied.
        self.data_source = kwargs.get('data_source')
class RestoreRequest(ProxyOnlyResource):
    """Description of a restore request.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param storage_account_url: Required. SAS URL to the container.
    :type storage_account_url: str
    :param blob_name: Name of a blob which contains the backup.
    :type blob_name: str
    :param overwrite: Required. <code>true</code> if the restore operation
     can overwrite the target app; otherwise, <code>false</code>.
     <code>true</code> is needed if trying to restore over an existing app.
    :type overwrite: bool
    :param site_name: Name of an app.
    :type site_name: str
    :param databases: Collection of databases which should be restored.
     This list has to match the list of databases included in the backup.
    :type databases: list[~azure.mgmt.web.models.DatabaseBackupSetting]
    :param ignore_conflicting_host_names: Changes the logic when restoring
     an app with custom domains. <code>true</code> to remove custom domains
     automatically. If <code>false</code>, custom domains are added to the
     app's object when it is being restored, but that might fail due to
     conflicts during the operation. Default value: False .
    :type ignore_conflicting_host_names: bool
    :param ignore_databases: Ignore the databases and only restore the site
     content. Default value: False .
    :type ignore_databases: bool
    :param app_service_plan: App Service plan that will own the restored
     site.
    :type app_service_plan: str
    :param operation_type: Operation type. Possible values include:
     'Default', 'Clone', 'Relocation', 'Snapshot', 'CloudFS'. Default value:
     "Default" .
    :type operation_type: str or
     ~azure.mgmt.web.models.BackupRestoreOperationType
    :param adjust_connection_strings: <code>true</code> if
     SiteConfig.ConnectionStrings should be set in the new app; otherwise,
     <code>false</code>.
    :type adjust_connection_strings: bool
    :param hosting_environment: App Service Environment name, if needed
     (only when restoring an app to an App Service Environment).
    :type hosting_environment: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'storage_account_url': {'required': True},
        'overwrite': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'storage_account_url': {'key': 'properties.storageAccountUrl', 'type': 'str'},
        'blob_name': {'key': 'properties.blobName', 'type': 'str'},
        'overwrite': {'key': 'properties.overwrite', 'type': 'bool'},
        'site_name': {'key': 'properties.siteName', 'type': 'str'},
        'databases': {'key': 'properties.databases', 'type': '[DatabaseBackupSetting]'},
        'ignore_conflicting_host_names': {'key': 'properties.ignoreConflictingHostNames', 'type': 'bool'},
        'ignore_databases': {'key': 'properties.ignoreDatabases', 'type': 'bool'},
        'app_service_plan': {'key': 'properties.appServicePlan', 'type': 'str'},
        'operation_type': {'key': 'properties.operationType', 'type': 'BackupRestoreOperationType'},
        'adjust_connection_strings': {'key': 'properties.adjustConnectionStrings', 'type': 'bool'},
        'hosting_environment': {'key': 'properties.hostingEnvironment', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(RestoreRequest, self).__init__(**kwargs)
        # Plain optional properties default to None.
        for attr in ('storage_account_url', 'blob_name', 'overwrite',
                     'site_name', 'databases', 'app_service_plan',
                     'adjust_connection_strings', 'hosting_environment'):
            setattr(self, attr, kwargs.get(attr))
        # These three carry non-None service defaults.
        self.ignore_conflicting_host_names = kwargs.get('ignore_conflicting_host_names', False)
        self.ignore_databases = kwargs.get('ignore_databases', False)
        self.operation_type = kwargs.get('operation_type', "Default")
class ServiceSpecification(Model):
    """Resource metrics service provided by the Microsoft.Insights resource
    provider.

    :param metric_specifications:
    :type metric_specifications:
     list[~azure.mgmt.web.models.MetricSpecification]
    :param log_specifications:
    :type log_specifications: list[~azure.mgmt.web.models.LogSpecification]
    """

    _attribute_map = {
        'metric_specifications': {'key': 'metricSpecifications', 'type': '[MetricSpecification]'},
        'log_specifications': {'key': 'logSpecifications', 'type': '[LogSpecification]'},
    }

    def __init__(self, **kwargs):
        super(ServiceSpecification, self).__init__(**kwargs)
        # Both specification lists are optional and default to None.
        self.metric_specifications = kwargs.get('metric_specifications')
        self.log_specifications = kwargs.get('log_specifications')
class Site(Resource):
"""A web app, a mobile app backend, or an API app.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:param location: Required. Resource Location.
:type location: str
:ivar type: Resource type.
:vartype type: str
:param tags: Resource tags.
:type tags: dict[str, str]
:ivar state: Current state of the app.
:vartype state: str
:ivar host_names: Hostnames associated with the app.
:vartype host_names: list[str]
:ivar repository_site_name: Name of the repository site.
:vartype repository_site_name: str
:ivar usage_state: State indicating whether the app has exceeded its quota
usage. Read-only. Possible values include: 'Normal', 'Exceeded'
:vartype usage_state: str or ~azure.mgmt.web.models.UsageState
:param enabled: <code>true</code> if the app is enabled; otherwise,
<code>false</code>. Setting this value to false disables the app (takes
the app offline).
:type enabled: bool
:ivar enabled_host_names: Enabled hostnames for the app.Hostnames need to
be assigned (see HostNames) AND enabled. Otherwise,
the app is not served on those hostnames.
:vartype enabled_host_names: list[str]
:ivar availability_state: Management information availability state for
the app. Possible values include: 'Normal', 'Limited',
'DisasterRecoveryMode'
:vartype availability_state: str or
~azure.mgmt.web.models.SiteAvailabilityState
:param host_name_ssl_states: Hostname SSL states are used to manage the
SSL bindings for app's hostnames.
:type host_name_ssl_states: list[~azure.mgmt.web.models.HostNameSslState]
:param server_farm_id: Resource ID of the associated App Service plan,
formatted as:
"/subscriptions/{subscriptionID}/resourceGroups/{groupName}/providers/Microsoft.Web/serverfarms/{appServicePlanName}".
:type server_farm_id: str
:param reserved: <code>true</code> if reserved; otherwise,
<code>false</code>. Default value: False .
:type reserved: bool
:param is_xenon: Obsolete: Hyper-V sandbox. Default value: False .
:type is_xenon: bool
:param hyper_v: Hyper-V sandbox. Default value: False .
:type hyper_v: bool
:ivar last_modified_time_utc: Last time the app was modified, in UTC.
Read-only.
:vartype last_modified_time_utc: datetime
:param site_config: Configuration of the app.
:type site_config: ~azure.mgmt.web.models.SiteConfig
:ivar traffic_manager_host_names: Azure Traffic Manager hostnames
associated with the app. Read-only.
:vartype traffic_manager_host_names: list[str]
:param scm_site_also_stopped: <code>true</code> to stop SCM (KUDU) site
when the app is stopped; otherwise, <code>false</code>. The default is
<code>false</code>. Default value: False .
:type scm_site_also_stopped: bool
:ivar target_swap_slot: Specifies which deployment slot this app will swap
into. Read-only.
:vartype target_swap_slot: str
:param hosting_environment_profile: App Service Environment to use for the
app.
:type hosting_environment_profile:
~azure.mgmt.web.models.HostingEnvironmentProfile
:param client_affinity_enabled: <code>true</code> to enable client
affinity; <code>false</code> to stop sending session affinity cookies,
which route client requests in the same session to the same instance.
Default is <code>true</code>.
:type client_affinity_enabled: bool
:param client_cert_enabled: <code>true</code> to enable client certificate
authentication (TLS mutual authentication); otherwise, <code>false</code>.
Default is <code>false</code>.
:type client_cert_enabled: bool
:param client_cert_exclusion_paths: client certificate authentication
comma-separated exclusion paths
:type client_cert_exclusion_paths: str
:param host_names_disabled: <code>true</code> to disable the public
hostnames of the app; otherwise, <code>false</code>.
If <code>true</code>, the app is only accessible via API management
process.
:type host_names_disabled: bool
:ivar outbound_ip_addresses: List of IP addresses that the app uses for
outbound connections (e.g. database access). Includes VIPs from tenants
that site can be hosted with current settings. Read-only.
:vartype outbound_ip_addresses: str
:ivar possible_outbound_ip_addresses: List of IP addresses that the app
uses for outbound connections (e.g. database access). Includes VIPs from
all tenants. Read-only.
:vartype possible_outbound_ip_addresses: str
:param container_size: Size of the function container.
:type container_size: int
:param daily_memory_time_quota: Maximum allowed daily memory-time quota
(applicable on dynamic apps only).
:type daily_memory_time_quota: int
:ivar suspended_till: App suspended till in case memory-time quota is
exceeded.
:vartype suspended_till: datetime
:ivar max_number_of_workers: Maximum number of workers.
This only applies to Functions container.
:vartype max_number_of_workers: int
:param cloning_info: If specified during app creation, the app is cloned
from a source app.
:type cloning_info: ~azure.mgmt.web.models.CloningInfo
:ivar resource_group: Name of the resource group the app belongs to.
Read-only.
:vartype resource_group: str
:ivar is_default_container: <code>true</code> if the app is a default
container; otherwise, <code>false</code>.
:vartype is_default_container: bool
:ivar default_host_name: Default hostname of the app. Read-only.
:vartype default_host_name: str
:ivar slot_swap_status: Status of the last deployment slot swap operation.
:vartype slot_swap_status: ~azure.mgmt.web.models.SlotSwapStatus
:param https_only: HttpsOnly: configures a web site to accept only https
requests. Issues redirect for
http requests
:type https_only: bool
:param redundancy_mode: Site redundancy mode. Possible values include:
'None', 'Manual', 'Failover', 'ActiveActive', 'GeoRedundant'
:type redundancy_mode: str or ~azure.mgmt.web.models.RedundancyMode
:ivar in_progress_operation_id: Specifies an operation id if this site has
a pending operation.
:vartype in_progress_operation_id: str
:param geo_distributions: GeoDistributions for this site
:type geo_distributions: list[~azure.mgmt.web.models.GeoDistribution]
:param identity:
:type identity: ~azure.mgmt.web.models.ManagedServiceIdentity
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'location': {'required': True},
'type': {'readonly': True},
'state': {'readonly': True},
'host_names': {'readonly': True},
'repository_site_name': {'readonly': True},
'usage_state': {'readonly': True},
'enabled_host_names': {'readonly': True},
'availability_state': {'readonly': True},
'last_modified_time_utc': {'readonly': True},
'traffic_manager_host_names': {'readonly': True},
'target_swap_slot': {'readonly': True},
'outbound_ip_addresses': {'readonly': True},
'possible_outbound_ip_addresses': {'readonly': True},
'suspended_till': {'readonly': True},
'max_number_of_workers': {'readonly': True},
'resource_group': {'readonly': True},
'is_default_container': {'readonly': True},
'default_host_name': {'readonly': True},
'slot_swap_status': {'readonly': True},
'in_progress_operation_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'state': {'key': 'properties.state', 'type': 'str'},
'host_names': {'key': 'properties.hostNames', 'type': '[str]'},
'repository_site_name': {'key': 'properties.repositorySiteName', 'type': 'str'},
'usage_state': {'key': 'properties.usageState', 'type': 'UsageState'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'enabled_host_names': {'key': 'properties.enabledHostNames', 'type': '[str]'},
'availability_state': {'key': 'properties.availabilityState', 'type': 'SiteAvailabilityState'},
'host_name_ssl_states': {'key': 'properties.hostNameSslStates', 'type': '[HostNameSslState]'},
'server_farm_id': {'key': 'properties.serverFarmId', 'type': 'str'},
'reserved': {'key': 'properties.reserved', 'type': 'bool'},
'is_xenon': {'key': 'properties.isXenon', 'type': 'bool'},
'hyper_v': {'key': 'properties.hyperV', 'type': 'bool'},
'last_modified_time_utc': {'key': 'properties.lastModifiedTimeUtc', 'type': 'iso-8601'},
'site_config': {'key': 'properties.siteConfig', 'type': 'SiteConfig'},
'traffic_manager_host_names': {'key': 'properties.trafficManagerHostNames', 'type': '[str]'},
'scm_site_also_stopped': {'key': 'properties.scmSiteAlsoStopped', 'type': 'bool'},
'target_swap_slot': {'key': 'properties.targetSwapSlot', 'type': 'str'},
'hosting_environment_profile': {'key': 'properties.hostingEnvironmentProfile', 'type': 'HostingEnvironmentProfile'},
'client_affinity_enabled': {'key': 'properties.clientAffinityEnabled', 'type': 'bool'},
'client_cert_enabled': {'key': 'properties.clientCertEnabled', 'type': 'bool'},
'client_cert_exclusion_paths': {'key': 'properties.clientCertExclusionPaths', 'type': 'str'},
'host_names_disabled': {'key': 'properties.hostNamesDisabled', 'type': 'bool'},
'outbound_ip_addresses': {'key': 'properties.outboundIpAddresses', 'type': 'str'},
'possible_outbound_ip_addresses': {'key': 'properties.possibleOutboundIpAddresses', 'type': 'str'},
'container_size': {'key': 'properties.containerSize', 'type': 'int'},
'daily_memory_time_quota': {'key': 'properties.dailyMemoryTimeQuota', 'type': 'int'},
'suspended_till': {'key': 'properties.suspendedTill', 'type': 'iso-8601'},
'max_number_of_workers': {'key': 'properties.maxNumberOfWorkers', 'type': 'int'},
'cloning_info': {'key': 'properties.cloningInfo', 'type': 'CloningInfo'},
'resource_group': {'key': 'properties.resourceGroup', 'type': 'str'},
'is_default_container': {'key': 'properties.isDefaultContainer', 'type': 'bool'},
'default_host_name': {'key': 'properties.defaultHostName', 'type': 'str'},
'slot_swap_status': {'key': 'properties.slotSwapStatus', 'type': 'SlotSwapStatus'},
'https_only': {'key': 'properties.httpsOnly', 'type': 'bool'},
'redundancy_mode': {'key': 'properties.redundancyMode', 'type': 'RedundancyMode'},
'in_progress_operation_id': {'key': 'properties.inProgressOperationId', 'type': 'str'},
'geo_distributions': {'key': 'properties.geoDistributions', 'type': '[GeoDistribution]'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
}
def __init__(self, **kwargs):
super(Site, self).__init__(**kwargs)
self.state = None
self.host_names = None
self.repository_site_name = None
self.usage_state = None
self.enabled = kwargs.get('enabled', None)
self.enabled_host_names = None
self.availability_state = None
self.host_name_ssl_states = kwargs.get('host_name_ssl_states', None)
self.server_farm_id = kwargs.get('server_farm_id', None)
self.reserved = kwargs.get('reserved', False)
self.is_xenon = kwargs.get('is_xenon', False)
self.hyper_v = kwargs.get('hyper_v', False)
self.last_modified_time_utc = None
self.site_config = kwargs.get('site_config', None)
self.traffic_manager_host_names = None
self.scm_site_also_stopped = kwargs.get('scm_site_also_stopped', False)
self.target_swap_slot = None
self.hosting_environment_profile = kwargs.get('hosting_environment_profile', None)
self.client_affinity_enabled = kwargs.get('client_affinity_enabled', None)
self.client_cert_enabled = kwargs.get('client_cert_enabled', None)
self.client_cert_exclusion_paths = kwargs.get('client_cert_exclusion_paths', None)
self.host_names_disabled = kwargs.get('host_names_disabled', None)
self.outbound_ip_addresses = None
self.possible_outbound_ip_addresses = None
self.container_size = kwargs.get('container_size', None)
self.daily_memory_time_quota = kwargs.get('daily_memory_time_quota', None)
self.suspended_till = None
self.max_number_of_workers = None
self.cloning_info = kwargs.get('cloning_info', None)
self.resource_group = None
self.is_default_container = None
self.default_host_name = None
self.slot_swap_status = None
self.https_only = kwargs.get('https_only', None)
self.redundancy_mode = kwargs.get('redundancy_mode', None)
self.in_progress_operation_id = None
self.geo_distributions = kwargs.get('geo_distributions', None)
self.identity = kwargs.get('identity', None)
class SiteAuthSettings(ProxyOnlyResource):
    """Configuration settings for the Azure App Service Authentication /
    Authorization feature.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param enabled: Whether the Authentication / Authorization feature is
     enabled for the current app.
    :type enabled: bool
    :param runtime_version: Runtime version of the Authentication /
     Authorization feature in use; can control the behavior of certain
     features in the module.
    :type runtime_version: str
    :param unauthenticated_client_action: Action taken when an
     unauthenticated client attempts to access the app. Possible values
     include: 'RedirectToLoginPage', 'AllowAnonymous'
    :type unauthenticated_client_action: str or
     ~azure.mgmt.web.models.UnauthenticatedClientAction
    :param token_store_enabled: Whether to durably store platform-specific
     security tokens obtained during login flows. The default is False.
    :type token_store_enabled: bool
    :param allowed_external_redirect_urls: External URLs that can be
     redirected to as part of logging in or out of the app; the query
     string part of each URL is ignored. URLs within the current domain
     are always implicitly allowed.
    :type allowed_external_redirect_urls: list[str]
    :param default_provider: Default authentication provider used when
     multiple providers are configured and the unauthenticated client
     action is "RedirectToLoginPage". Possible values include:
     'AzureActiveDirectory', 'Facebook', 'Google', 'MicrosoftAccount',
     'Twitter'
    :type default_provider: str or
     ~azure.mgmt.web.models.BuiltInAuthenticationProvider
    :param token_refresh_extension_hours: Number of hours after session
     token expiration during which the token can still be used to call
     the token refresh API. The default is 72 hours.
    :type token_refresh_extension_hours: float
    :param client_id: Client ID of this relying party application
     (client_id); required for OpenID Connect authentication with Azure
     Active Directory or other OpenID Connect providers. See
     http://openid.net/specs/openid-connect-core-1_0.html
    :type client_id: str
    :param client_secret: Client secret of this relying party application
     (in Azure Active Directory, the Key). Optional: without it the
     OpenID Connect implicit flow is used; with it the authorization
     code flow is used. See
     http://openid.net/specs/openid-connect-core-1_0.html
    :type client_secret: str
    :param client_secret_certificate_thumbprint: Thumbprint of a signing
     certificate used as an optional replacement for the client secret.
    :type client_secret_certificate_thumbprint: str
    :param issuer: OpenID Connect Issuer URI of the entity that issues
     access tokens for this application. For Azure Active Directory this
     is the directory tenant URI, e.g.
     https://sts.windows.net/{tenant-guid}/. Case-sensitive. See
     http://openid.net/specs/openid-connect-discovery-1_0.html
    :type issuer: str
    :param validate_issuer: Whether the issuer must be a valid HTTPS URL
     and is validated as such.
    :type validate_issuer: bool
    :param allowed_audiences: Allowed audience values when validating
     JWTs issued by Azure Active Directory; the ClientID value is always
     considered an allowed audience regardless of this setting.
    :type allowed_audiences: list[str]
    :param additional_login_params: "key=value" login parameters sent to
     the OpenID Connect authorization endpoint when a user logs in.
    :type additional_login_params: list[str]
    :param google_client_id: OpenID Connect Client ID for the Google web
     application; required for Google Sign-In. See
     https://developers.google.com/identity/sign-in/web/
    :type google_client_id: str
    :param google_client_secret: Client secret associated with the
     Google web application; required for Google Sign-In.
    :type google_client_secret: str
    :param google_oauth_scopes: OAuth 2.0 scopes requested as part of
     Google Sign-In; defaults to "openid", "profile" and "email" when
     not specified.
    :type google_oauth_scopes: list[str]
    :param facebook_app_id: App ID of the Facebook app used for login;
     required for Facebook Login. See
     https://developers.facebook.com/docs/facebook-login
    :type facebook_app_id: str
    :param facebook_app_secret: App secret of the Facebook app used for
     Facebook Login; required for Facebook Login.
    :type facebook_app_secret: str
    :param facebook_oauth_scopes: OAuth 2.0 scopes requested as part of
     Facebook Login; optional.
    :type facebook_oauth_scopes: list[str]
    :param twitter_consumer_key: OAuth 1.0a consumer key of the Twitter
     application used for sign-in; required for Twitter Sign-In. See
     https://dev.twitter.com/web/sign-in
    :type twitter_consumer_key: str
    :param twitter_consumer_secret: OAuth 1.0a consumer secret of the
     Twitter application used for sign-in; required for Twitter Sign-In.
    :type twitter_consumer_secret: str
    :param microsoft_account_client_id: OAuth 2.0 client ID created for
     the app; required for Microsoft Account authentication. See
     https://dev.onedrive.com/auth/msa_oauth.htm
    :type microsoft_account_client_id: str
    :param microsoft_account_client_secret: OAuth 2.0 client secret
     created for the app; required for Microsoft Account authentication.
    :type microsoft_account_client_secret: str
    :param microsoft_account_oauth_scopes: OAuth 2.0 scopes requested as
     part of Microsoft Account authentication; defaults to "wl.basic"
     when not specified. See
     https://msdn.microsoft.com/en-us/library/dn631845.aspx
    :type microsoft_account_oauth_scopes: list[str]
    """
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'enabled': {'key': 'properties.enabled', 'type': 'bool'},
        'runtime_version': {'key': 'properties.runtimeVersion', 'type': 'str'},
        'unauthenticated_client_action': {'key': 'properties.unauthenticatedClientAction', 'type': 'UnauthenticatedClientAction'},
        'token_store_enabled': {'key': 'properties.tokenStoreEnabled', 'type': 'bool'},
        'allowed_external_redirect_urls': {'key': 'properties.allowedExternalRedirectUrls', 'type': '[str]'},
        'default_provider': {'key': 'properties.defaultProvider', 'type': 'BuiltInAuthenticationProvider'},
        'token_refresh_extension_hours': {'key': 'properties.tokenRefreshExtensionHours', 'type': 'float'},
        'client_id': {'key': 'properties.clientId', 'type': 'str'},
        'client_secret': {'key': 'properties.clientSecret', 'type': 'str'},
        'client_secret_certificate_thumbprint': {'key': 'properties.clientSecretCertificateThumbprint', 'type': 'str'},
        'issuer': {'key': 'properties.issuer', 'type': 'str'},
        'validate_issuer': {'key': 'properties.validateIssuer', 'type': 'bool'},
        'allowed_audiences': {'key': 'properties.allowedAudiences', 'type': '[str]'},
        'additional_login_params': {'key': 'properties.additionalLoginParams', 'type': '[str]'},
        'google_client_id': {'key': 'properties.googleClientId', 'type': 'str'},
        'google_client_secret': {'key': 'properties.googleClientSecret', 'type': 'str'},
        'google_oauth_scopes': {'key': 'properties.googleOAuthScopes', 'type': '[str]'},
        'facebook_app_id': {'key': 'properties.facebookAppId', 'type': 'str'},
        'facebook_app_secret': {'key': 'properties.facebookAppSecret', 'type': 'str'},
        'facebook_oauth_scopes': {'key': 'properties.facebookOAuthScopes', 'type': '[str]'},
        'twitter_consumer_key': {'key': 'properties.twitterConsumerKey', 'type': 'str'},
        'twitter_consumer_secret': {'key': 'properties.twitterConsumerSecret', 'type': 'str'},
        'microsoft_account_client_id': {'key': 'properties.microsoftAccountClientId', 'type': 'str'},
        'microsoft_account_client_secret': {'key': 'properties.microsoftAccountClientSecret', 'type': 'str'},
        'microsoft_account_oauth_scopes': {'key': 'properties.microsoftAccountOAuthScopes', 'type': '[str]'},
    }
    def __init__(self, **kwargs):
        """Copy each optional auth setting from *kwargs* (default None)."""
        super(SiteAuthSettings, self).__init__(**kwargs)
        # Every property on this model is client-settable; none default
        # to anything but None.
        for prop in (
                'enabled', 'runtime_version', 'unauthenticated_client_action',
                'token_store_enabled', 'allowed_external_redirect_urls',
                'default_provider', 'token_refresh_extension_hours',
                'client_id', 'client_secret',
                'client_secret_certificate_thumbprint', 'issuer',
                'validate_issuer', 'allowed_audiences',
                'additional_login_params', 'google_client_id',
                'google_client_secret', 'google_oauth_scopes',
                'facebook_app_id', 'facebook_app_secret',
                'facebook_oauth_scopes', 'twitter_consumer_key',
                'twitter_consumer_secret', 'microsoft_account_client_id',
                'microsoft_account_client_secret',
                'microsoft_account_oauth_scopes'):
            setattr(self, prop, kwargs.get(prop, None))
class SiteCloneability(Model):
    """Represents whether or not an app is cloneable.

    :param result: Name of app. Possible values include: 'Cloneable',
     'PartiallyCloneable', 'NotCloneable'
    :type result: str or ~azure.mgmt.web.models.CloneAbilityResult
    :param blocking_features: Features enabled on the app that prevent
     cloning altogether.
    :type blocking_features:
     list[~azure.mgmt.web.models.SiteCloneabilityCriterion]
    :param unsupported_features: Features that are non-blocking but cannot
     be cloned; the app can still be cloned, but these features will not
     be set up on the cloned app.
    :type unsupported_features:
     list[~azure.mgmt.web.models.SiteCloneabilityCriterion]
    :param blocking_characteristics: Application characteristics that
     block cloning.
    :type blocking_characteristics:
     list[~azure.mgmt.web.models.SiteCloneabilityCriterion]
    """
    _attribute_map = {
        'result': {'key': 'result', 'type': 'CloneAbilityResult'},
        'blocking_features': {'key': 'blockingFeatures', 'type': '[SiteCloneabilityCriterion]'},
        'unsupported_features': {'key': 'unsupportedFeatures', 'type': '[SiteCloneabilityCriterion]'},
        'blocking_characteristics': {'key': 'blockingCharacteristics', 'type': '[SiteCloneabilityCriterion]'},
    }
    def __init__(self, **kwargs):
        """Copy each optional cloneability field from *kwargs* (default None)."""
        super(SiteCloneability, self).__init__(**kwargs)
        for prop in ('result', 'blocking_features', 'unsupported_features',
                     'blocking_characteristics'):
            setattr(self, prop, kwargs.get(prop, None))
class SiteCloneabilityCriterion(Model):
    """An app cloneability criterion.

    :param name: Name of criterion.
    :type name: str
    :param description: Description of criterion.
    :type description: str
    """
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        """Copy the optional name/description from *kwargs* (default None)."""
        super(SiteCloneabilityCriterion, self).__init__(**kwargs)
        for prop in ('name', 'description'):
            setattr(self, prop, kwargs.get(prop, None))
class SiteConfig(Model):
"""Configuration of an App Service app.
Variables are only populated by the server, and will be ignored when
sending a request.
:param number_of_workers: Number of workers.
:type number_of_workers: int
:param default_documents: Default documents.
:type default_documents: list[str]
:param net_framework_version: .NET Framework version. Default value:
"v4.6" .
:type net_framework_version: str
:param php_version: Version of PHP.
:type php_version: str
:param python_version: Version of Python.
:type python_version: str
:param node_version: Version of Node.js.
:type node_version: str
:param linux_fx_version: Linux App Framework and version
:type linux_fx_version: str
:param windows_fx_version: Xenon App Framework and version
:type windows_fx_version: str
:param request_tracing_enabled: <code>true</code> if request tracing is
enabled; otherwise, <code>false</code>.
:type request_tracing_enabled: bool
:param request_tracing_expiration_time: Request tracing expiration time.
:type request_tracing_expiration_time: datetime
:param remote_debugging_enabled: <code>true</code> if remote debugging is
enabled; otherwise, <code>false</code>.
:type remote_debugging_enabled: bool
:param remote_debugging_version: Remote debugging version.
:type remote_debugging_version: str
:param http_logging_enabled: <code>true</code> if HTTP logging is enabled;
otherwise, <code>false</code>.
:type http_logging_enabled: bool
:param logs_directory_size_limit: HTTP logs directory size limit.
:type logs_directory_size_limit: int
:param detailed_error_logging_enabled: <code>true</code> if detailed error
logging is enabled; otherwise, <code>false</code>.
:type detailed_error_logging_enabled: bool
:param publishing_username: Publishing user name.
:type publishing_username: str
:param app_settings: Application settings.
:type app_settings: list[~azure.mgmt.web.models.NameValuePair]
:param azure_storage_accounts: User-provided Azure storage accounts.
:type azure_storage_accounts: dict[str,
~azure.mgmt.web.models.AzureStorageInfoValue]
:param connection_strings: Connection strings.
:type connection_strings: list[~azure.mgmt.web.models.ConnStringInfo]
:ivar machine_key: Site MachineKey.
:vartype machine_key: ~azure.mgmt.web.models.SiteMachineKey
:param handler_mappings: Handler mappings.
:type handler_mappings: list[~azure.mgmt.web.models.HandlerMapping]
:param document_root: Document root.
:type document_root: str
:param scm_type: SCM type. Possible values include: 'None', 'Dropbox',
'Tfs', 'LocalGit', 'GitHub', 'CodePlexGit', 'CodePlexHg', 'BitbucketGit',
'BitbucketHg', 'ExternalGit', 'ExternalHg', 'OneDrive', 'VSO'
:type scm_type: str or ~azure.mgmt.web.models.ScmType
:param use32_bit_worker_process: <code>true</code> to use 32-bit worker
process; otherwise, <code>false</code>.
:type use32_bit_worker_process: bool
:param web_sockets_enabled: <code>true</code> if WebSocket is enabled;
otherwise, <code>false</code>.
:type web_sockets_enabled: bool
:param always_on: <code>true</code> if Always On is enabled; otherwise,
<code>false</code>.
:type always_on: bool
:param java_version: Java version.
:type java_version: str
:param java_container: Java container.
:type java_container: str
:param java_container_version: Java container version.
:type java_container_version: str
:param app_command_line: App command line to launch.
:type app_command_line: str
:param managed_pipeline_mode: Managed pipeline mode. Possible values
include: 'Integrated', 'Classic'
:type managed_pipeline_mode: str or
~azure.mgmt.web.models.ManagedPipelineMode
:param virtual_applications: Virtual applications.
:type virtual_applications:
list[~azure.mgmt.web.models.VirtualApplication]
:param load_balancing: Site load balancing. Possible values include:
'WeightedRoundRobin', 'LeastRequests', 'LeastResponseTime',
'WeightedTotalTraffic', 'RequestHash'
:type load_balancing: str or ~azure.mgmt.web.models.SiteLoadBalancing
:param experiments: This is work around for polymorphic types.
:type experiments: ~azure.mgmt.web.models.Experiments
:param limits: Site limits.
:type limits: ~azure.mgmt.web.models.SiteLimits
:param auto_heal_enabled: <code>true</code> if Auto Heal is enabled;
otherwise, <code>false</code>.
:type auto_heal_enabled: bool
:param auto_heal_rules: Auto Heal rules.
:type auto_heal_rules: ~azure.mgmt.web.models.AutoHealRules
:param tracing_options: Tracing options.
:type tracing_options: str
:param vnet_name: Virtual Network name.
:type vnet_name: str
:param cors: Cross-Origin Resource Sharing (CORS) settings.
:type cors: ~azure.mgmt.web.models.CorsSettings
:param push: Push endpoint settings.
:type push: ~azure.mgmt.web.models.PushSettings
:param api_definition: Information about the formal API definition for the
app.
:type api_definition: ~azure.mgmt.web.models.ApiDefinitionInfo
:param auto_swap_slot_name: Auto-swap slot name.
:type auto_swap_slot_name: str
:param local_my_sql_enabled: <code>true</code> to enable local MySQL;
otherwise, <code>false</code>. Default value: False .
:type local_my_sql_enabled: bool
:param managed_service_identity_id: Managed Service Identity Id
:type managed_service_identity_id: int
:param x_managed_service_identity_id: Explicit Managed Service Identity Id
:type x_managed_service_identity_id: int
:param ip_security_restrictions: IP security restrictions for main.
:type ip_security_restrictions:
list[~azure.mgmt.web.models.IpSecurityRestriction]
:param scm_ip_security_restrictions: IP security restrictions for scm.
:type scm_ip_security_restrictions:
list[~azure.mgmt.web.models.IpSecurityRestriction]
:param scm_ip_security_restrictions_use_main: IP security restrictions for
scm to use main.
:type scm_ip_security_restrictions_use_main: bool
:param http20_enabled: Http20Enabled: configures a web site to allow
clients to connect over http2.0. Default value: True .
:type http20_enabled: bool
:param min_tls_version: MinTlsVersion: configures the minimum version of
TLS required for SSL requests. Possible values include: '1.0', '1.1',
'1.2'
:type min_tls_version: str or ~azure.mgmt.web.models.SupportedTlsVersions
:param ftps_state: State of FTP / FTPS service. Possible values include:
'AllAllowed', 'FtpsOnly', 'Disabled'
:type ftps_state: str or ~azure.mgmt.web.models.FtpsState
:param reserved_instance_count: Number of reserved instances.
This setting only applies to the Consumption Plan
:type reserved_instance_count: int
"""
_validation = {
'machine_key': {'readonly': True},
'reserved_instance_count': {'maximum': 10, 'minimum': 0},
}
_attribute_map = {
'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'},
'default_documents': {'key': 'defaultDocuments', 'type': '[str]'},
'net_framework_version': {'key': 'netFrameworkVersion', 'type': 'str'},
'php_version': {'key': 'phpVersion', 'type': 'str'},
'python_version': {'key': 'pythonVersion', 'type': 'str'},
'node_version': {'key': 'nodeVersion', 'type': 'str'},
'linux_fx_version': {'key': 'linuxFxVersion', 'type': 'str'},
'windows_fx_version': {'key': 'windowsFxVersion', 'type': 'str'},
'request_tracing_enabled': {'key': 'requestTracingEnabled', 'type': 'bool'},
'request_tracing_expiration_time': {'key': 'requestTracingExpirationTime', 'type': 'iso-8601'},
'remote_debugging_enabled': {'key': 'remoteDebuggingEnabled', 'type': 'bool'},
'remote_debugging_version': {'key': 'remoteDebuggingVersion', 'type': 'str'},
'http_logging_enabled': {'key': 'httpLoggingEnabled', 'type': 'bool'},
'logs_directory_size_limit': {'key': 'logsDirectorySizeLimit', 'type': 'int'},
'detailed_error_logging_enabled': {'key': 'detailedErrorLoggingEnabled', 'type': 'bool'},
'publishing_username': {'key': 'publishingUsername', 'type': 'str'},
'app_settings': {'key': 'appSettings', 'type': '[NameValuePair]'},
'azure_storage_accounts': {'key': 'azureStorageAccounts', 'type': '{AzureStorageInfoValue}'},
'connection_strings': {'key': 'connectionStrings', 'type': '[ConnStringInfo]'},
'machine_key': {'key': 'machineKey', 'type': 'SiteMachineKey'},
'handler_mappings': {'key': 'handlerMappings', 'type': '[HandlerMapping]'},
'document_root': {'key': 'documentRoot', 'type': 'str'},
'scm_type': {'key': 'scmType', 'type': 'str'},
'use32_bit_worker_process': {'key': 'use32BitWorkerProcess', 'type': 'bool'},
'web_sockets_enabled': {'key': 'webSocketsEnabled', 'type': 'bool'},
'always_on': {'key': 'alwaysOn', 'type': 'bool'},
'java_version': {'key': 'javaVersion', 'type': 'str'},
'java_container': {'key': 'javaContainer', 'type': 'str'},
'java_container_version': {'key': 'javaContainerVersion', 'type': 'str'},
'app_command_line': {'key': 'appCommandLine', 'type': 'str'},
'managed_pipeline_mode': {'key': 'managedPipelineMode', 'type': 'ManagedPipelineMode'},
'virtual_applications': {'key': 'virtualApplications', 'type': '[VirtualApplication]'},
'load_balancing': {'key': 'loadBalancing', 'type': 'SiteLoadBalancing'},
'experiments': {'key': 'experiments', 'type': 'Experiments'},
'limits': {'key': 'limits', 'type': 'SiteLimits'},
'auto_heal_enabled': {'key': 'autoHealEnabled', 'type': 'bool'},
'auto_heal_rules': {'key': 'autoHealRules', 'type': 'AutoHealRules'},
'tracing_options': {'key': 'tracingOptions', 'type': 'str'},
'vnet_name': {'key': 'vnetName', 'type': 'str'},
'cors': {'key': 'cors', 'type': 'CorsSettings'},
'push': {'key': 'push', 'type': 'PushSettings'},
'api_definition': {'key': 'apiDefinition', 'type': 'ApiDefinitionInfo'},
'auto_swap_slot_name': {'key': 'autoSwapSlotName', 'type': 'str'},
'local_my_sql_enabled': {'key': 'localMySqlEnabled', 'type': 'bool'},
'managed_service_identity_id': {'key': 'managedServiceIdentityId', 'type': 'int'},
'x_managed_service_identity_id': {'key': 'xManagedServiceIdentityId', 'type': 'int'},
'ip_security_restrictions': {'key': 'ipSecurityRestrictions', 'type': '[IpSecurityRestriction]'},
'scm_ip_security_restrictions': {'key': 'scmIpSecurityRestrictions', 'type': '[IpSecurityRestriction]'},
'scm_ip_security_restrictions_use_main': {'key': 'scmIpSecurityRestrictionsUseMain', 'type': 'bool'},
'http20_enabled': {'key': 'http20Enabled', 'type': 'bool'},
'min_tls_version': {'key': 'minTlsVersion', 'type': 'str'},
'ftps_state': {'key': 'ftpsState', 'type': 'str'},
'reserved_instance_count': {'key': 'reservedInstanceCount', 'type': 'int'},
}
def __init__(self, **kwargs):
super(SiteConfig, self).__init__(**kwargs)
self.number_of_workers = kwargs.get('number_of_workers', None)
self.default_documents = kwargs.get('default_documents', None)
self.net_framework_version = kwargs.get('net_framework_version', "v4.6")
self.php_version = kwargs.get('php_version', None)
self.python_version = kwargs.get('python_version', None)
self.node_version = kwargs.get('node_version', None)
self.linux_fx_version = kwargs.get('linux_fx_version', None)
self.windows_fx_version = kwargs.get('windows_fx_version', None)
self.request_tracing_enabled = kwargs.get('request_tracing_enabled', None)
self.request_tracing_expiration_time = kwargs.get('request_tracing_expiration_time', None)
self.remote_debugging_enabled = kwargs.get('remote_debugging_enabled', None)
self.remote_debugging_version = kwargs.get('remote_debugging_version', None)
self.http_logging_enabled = kwargs.get('http_logging_enabled', None)
self.logs_directory_size_limit = kwargs.get('logs_directory_size_limit', None)
self.detailed_error_logging_enabled = kwargs.get('detailed_error_logging_enabled', None)
self.publishing_username = kwargs.get('publishing_username', None)
self.app_settings = kwargs.get('app_settings', None)
self.azure_storage_accounts = kwargs.get('azure_storage_accounts', None)
self.connection_strings = kwargs.get('connection_strings', None)
self.machine_key = None
self.handler_mappings = kwargs.get('handler_mappings', None)
self.document_root = kwargs.get('document_root', None)
self.scm_type = kwargs.get('scm_type', None)
self.use32_bit_worker_process = kwargs.get('use32_bit_worker_process', None)
self.web_sockets_enabled = kwargs.get('web_sockets_enabled', None)
self.always_on = kwargs.get('always_on', None)
self.java_version = kwargs.get('java_version', None)
self.java_container = kwargs.get('java_container', None)
self.java_container_version = kwargs.get('java_container_version', None)
self.app_command_line = kwargs.get('app_command_line', None)
self.managed_pipeline_mode = kwargs.get('managed_pipeline_mode', None)
self.virtual_applications = kwargs.get('virtual_applications', None)
self.load_balancing = kwargs.get('load_balancing', None)
self.experiments = kwargs.get('experiments', None)
self.limits = kwargs.get('limits', None)
self.auto_heal_enabled = kwargs.get('auto_heal_enabled', None)
self.auto_heal_rules = kwargs.get('auto_heal_rules', None)
self.tracing_options = kwargs.get('tracing_options', None)
self.vnet_name = kwargs.get('vnet_name', None)
self.cors = kwargs.get('cors', None)
self.push = kwargs.get('push', None)
self.api_definition = kwargs.get('api_definition', None)
self.auto_swap_slot_name = kwargs.get('auto_swap_slot_name', None)
self.local_my_sql_enabled = kwargs.get('local_my_sql_enabled', False)
self.managed_service_identity_id = kwargs.get('managed_service_identity_id', None)
self.x_managed_service_identity_id = kwargs.get('x_managed_service_identity_id', None)
self.ip_security_restrictions = kwargs.get('ip_security_restrictions', None)
self.scm_ip_security_restrictions = kwargs.get('scm_ip_security_restrictions', None)
self.scm_ip_security_restrictions_use_main = kwargs.get('scm_ip_security_restrictions_use_main', None)
self.http20_enabled = kwargs.get('http20_enabled', True)
self.min_tls_version = kwargs.get('min_tls_version', None)
self.ftps_state = kwargs.get('ftps_state', None)
self.reserved_instance_count = kwargs.get('reserved_instance_count', None)
class SiteConfigResource(ProxyOnlyResource):
    """Web app configuration ARM resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param number_of_workers: Number of workers.
    :type number_of_workers: int
    :param default_documents: Default documents.
    :type default_documents: list[str]
    :param net_framework_version: .NET Framework version. Default value:
     "v4.6" .
    :type net_framework_version: str
    :param php_version: Version of PHP.
    :type php_version: str
    :param python_version: Version of Python.
    :type python_version: str
    :param node_version: Version of Node.js.
    :type node_version: str
    :param linux_fx_version: Linux App Framework and version
    :type linux_fx_version: str
    :param windows_fx_version: Xenon App Framework and version
    :type windows_fx_version: str
    :param request_tracing_enabled: <code>true</code> if request tracing is
     enabled; otherwise, <code>false</code>.
    :type request_tracing_enabled: bool
    :param request_tracing_expiration_time: Request tracing expiration time.
    :type request_tracing_expiration_time: datetime
    :param remote_debugging_enabled: <code>true</code> if remote debugging is
     enabled; otherwise, <code>false</code>.
    :type remote_debugging_enabled: bool
    :param remote_debugging_version: Remote debugging version.
    :type remote_debugging_version: str
    :param http_logging_enabled: <code>true</code> if HTTP logging is enabled;
     otherwise, <code>false</code>.
    :type http_logging_enabled: bool
    :param logs_directory_size_limit: HTTP logs directory size limit.
    :type logs_directory_size_limit: int
    :param detailed_error_logging_enabled: <code>true</code> if detailed error
     logging is enabled; otherwise, <code>false</code>.
    :type detailed_error_logging_enabled: bool
    :param publishing_username: Publishing user name.
    :type publishing_username: str
    :param app_settings: Application settings.
    :type app_settings: list[~azure.mgmt.web.models.NameValuePair]
    :param azure_storage_accounts: User-provided Azure storage accounts.
    :type azure_storage_accounts: dict[str,
     ~azure.mgmt.web.models.AzureStorageInfoValue]
    :param connection_strings: Connection strings.
    :type connection_strings: list[~azure.mgmt.web.models.ConnStringInfo]
    :ivar machine_key: Site MachineKey.
    :vartype machine_key: ~azure.mgmt.web.models.SiteMachineKey
    :param handler_mappings: Handler mappings.
    :type handler_mappings: list[~azure.mgmt.web.models.HandlerMapping]
    :param document_root: Document root.
    :type document_root: str
    :param scm_type: SCM type. Possible values include: 'None', 'Dropbox',
     'Tfs', 'LocalGit', 'GitHub', 'CodePlexGit', 'CodePlexHg', 'BitbucketGit',
     'BitbucketHg', 'ExternalGit', 'ExternalHg', 'OneDrive', 'VSO'
    :type scm_type: str or ~azure.mgmt.web.models.ScmType
    :param use32_bit_worker_process: <code>true</code> to use 32-bit worker
     process; otherwise, <code>false</code>.
    :type use32_bit_worker_process: bool
    :param web_sockets_enabled: <code>true</code> if WebSocket is enabled;
     otherwise, <code>false</code>.
    :type web_sockets_enabled: bool
    :param always_on: <code>true</code> if Always On is enabled; otherwise,
     <code>false</code>.
    :type always_on: bool
    :param java_version: Java version.
    :type java_version: str
    :param java_container: Java container.
    :type java_container: str
    :param java_container_version: Java container version.
    :type java_container_version: str
    :param app_command_line: App command line to launch.
    :type app_command_line: str
    :param managed_pipeline_mode: Managed pipeline mode. Possible values
     include: 'Integrated', 'Classic'
    :type managed_pipeline_mode: str or
     ~azure.mgmt.web.models.ManagedPipelineMode
    :param virtual_applications: Virtual applications.
    :type virtual_applications:
     list[~azure.mgmt.web.models.VirtualApplication]
    :param load_balancing: Site load balancing. Possible values include:
     'WeightedRoundRobin', 'LeastRequests', 'LeastResponseTime',
     'WeightedTotalTraffic', 'RequestHash'
    :type load_balancing: str or ~azure.mgmt.web.models.SiteLoadBalancing
    :param experiments: This is work around for polymorphic types.
    :type experiments: ~azure.mgmt.web.models.Experiments
    :param limits: Site limits.
    :type limits: ~azure.mgmt.web.models.SiteLimits
    :param auto_heal_enabled: <code>true</code> if Auto Heal is enabled;
     otherwise, <code>false</code>.
    :type auto_heal_enabled: bool
    :param auto_heal_rules: Auto Heal rules.
    :type auto_heal_rules: ~azure.mgmt.web.models.AutoHealRules
    :param tracing_options: Tracing options.
    :type tracing_options: str
    :param vnet_name: Virtual Network name.
    :type vnet_name: str
    :param cors: Cross-Origin Resource Sharing (CORS) settings.
    :type cors: ~azure.mgmt.web.models.CorsSettings
    :param push: Push endpoint settings.
    :type push: ~azure.mgmt.web.models.PushSettings
    :param api_definition: Information about the formal API definition for the
     app.
    :type api_definition: ~azure.mgmt.web.models.ApiDefinitionInfo
    :param auto_swap_slot_name: Auto-swap slot name.
    :type auto_swap_slot_name: str
    :param local_my_sql_enabled: <code>true</code> to enable local MySQL;
     otherwise, <code>false</code>. Default value: False .
    :type local_my_sql_enabled: bool
    :param managed_service_identity_id: Managed Service Identity Id
    :type managed_service_identity_id: int
    :param x_managed_service_identity_id: Explicit Managed Service Identity Id
    :type x_managed_service_identity_id: int
    :param ip_security_restrictions: IP security restrictions for main.
    :type ip_security_restrictions:
     list[~azure.mgmt.web.models.IpSecurityRestriction]
    :param scm_ip_security_restrictions: IP security restrictions for scm.
    :type scm_ip_security_restrictions:
     list[~azure.mgmt.web.models.IpSecurityRestriction]
    :param scm_ip_security_restrictions_use_main: IP security restrictions for
     scm to use main.
    :type scm_ip_security_restrictions_use_main: bool
    :param http20_enabled: Http20Enabled: configures a web site to allow
     clients to connect over http2.0. Default value: True .
    :type http20_enabled: bool
    :param min_tls_version: MinTlsVersion: configures the minimum version of
     TLS required for SSL requests. Possible values include: '1.0', '1.1',
     '1.2'
    :type min_tls_version: str or ~azure.mgmt.web.models.SupportedTlsVersions
    :param ftps_state: State of FTP / FTPS service. Possible values include:
     'AllAllowed', 'FtpsOnly', 'Disabled'
    :type ftps_state: str or ~azure.mgmt.web.models.FtpsState
    :param reserved_instance_count: Number of reserved instances.
     This setting only applies to the Consumption Plan
    :type reserved_instance_count: int
    """

    # Read-only fields are never serialized on requests; reserved_instance_count
    # additionally carries a service-enforced range constraint (0..10).
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'machine_key': {'readonly': True},
        'reserved_instance_count': {'maximum': 10, 'minimum': 0},
    }

    # Maps each Python attribute to its nested "properties.*" wire key and
    # serialization type; keys here must match the REST schema exactly.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'number_of_workers': {'key': 'properties.numberOfWorkers', 'type': 'int'},
        'default_documents': {'key': 'properties.defaultDocuments', 'type': '[str]'},
        'net_framework_version': {'key': 'properties.netFrameworkVersion', 'type': 'str'},
        'php_version': {'key': 'properties.phpVersion', 'type': 'str'},
        'python_version': {'key': 'properties.pythonVersion', 'type': 'str'},
        'node_version': {'key': 'properties.nodeVersion', 'type': 'str'},
        'linux_fx_version': {'key': 'properties.linuxFxVersion', 'type': 'str'},
        'windows_fx_version': {'key': 'properties.windowsFxVersion', 'type': 'str'},
        'request_tracing_enabled': {'key': 'properties.requestTracingEnabled', 'type': 'bool'},
        'request_tracing_expiration_time': {'key': 'properties.requestTracingExpirationTime', 'type': 'iso-8601'},
        'remote_debugging_enabled': {'key': 'properties.remoteDebuggingEnabled', 'type': 'bool'},
        'remote_debugging_version': {'key': 'properties.remoteDebuggingVersion', 'type': 'str'},
        'http_logging_enabled': {'key': 'properties.httpLoggingEnabled', 'type': 'bool'},
        'logs_directory_size_limit': {'key': 'properties.logsDirectorySizeLimit', 'type': 'int'},
        'detailed_error_logging_enabled': {'key': 'properties.detailedErrorLoggingEnabled', 'type': 'bool'},
        'publishing_username': {'key': 'properties.publishingUsername', 'type': 'str'},
        'app_settings': {'key': 'properties.appSettings', 'type': '[NameValuePair]'},
        'azure_storage_accounts': {'key': 'properties.azureStorageAccounts', 'type': '{AzureStorageInfoValue}'},
        'connection_strings': {'key': 'properties.connectionStrings', 'type': '[ConnStringInfo]'},
        'machine_key': {'key': 'properties.machineKey', 'type': 'SiteMachineKey'},
        'handler_mappings': {'key': 'properties.handlerMappings', 'type': '[HandlerMapping]'},
        'document_root': {'key': 'properties.documentRoot', 'type': 'str'},
        'scm_type': {'key': 'properties.scmType', 'type': 'str'},
        'use32_bit_worker_process': {'key': 'properties.use32BitWorkerProcess', 'type': 'bool'},
        'web_sockets_enabled': {'key': 'properties.webSocketsEnabled', 'type': 'bool'},
        'always_on': {'key': 'properties.alwaysOn', 'type': 'bool'},
        'java_version': {'key': 'properties.javaVersion', 'type': 'str'},
        'java_container': {'key': 'properties.javaContainer', 'type': 'str'},
        'java_container_version': {'key': 'properties.javaContainerVersion', 'type': 'str'},
        'app_command_line': {'key': 'properties.appCommandLine', 'type': 'str'},
        'managed_pipeline_mode': {'key': 'properties.managedPipelineMode', 'type': 'ManagedPipelineMode'},
        'virtual_applications': {'key': 'properties.virtualApplications', 'type': '[VirtualApplication]'},
        'load_balancing': {'key': 'properties.loadBalancing', 'type': 'SiteLoadBalancing'},
        'experiments': {'key': 'properties.experiments', 'type': 'Experiments'},
        'limits': {'key': 'properties.limits', 'type': 'SiteLimits'},
        'auto_heal_enabled': {'key': 'properties.autoHealEnabled', 'type': 'bool'},
        'auto_heal_rules': {'key': 'properties.autoHealRules', 'type': 'AutoHealRules'},
        'tracing_options': {'key': 'properties.tracingOptions', 'type': 'str'},
        'vnet_name': {'key': 'properties.vnetName', 'type': 'str'},
        'cors': {'key': 'properties.cors', 'type': 'CorsSettings'},
        'push': {'key': 'properties.push', 'type': 'PushSettings'},
        'api_definition': {'key': 'properties.apiDefinition', 'type': 'ApiDefinitionInfo'},
        'auto_swap_slot_name': {'key': 'properties.autoSwapSlotName', 'type': 'str'},
        'local_my_sql_enabled': {'key': 'properties.localMySqlEnabled', 'type': 'bool'},
        'managed_service_identity_id': {'key': 'properties.managedServiceIdentityId', 'type': 'int'},
        'x_managed_service_identity_id': {'key': 'properties.xManagedServiceIdentityId', 'type': 'int'},
        'ip_security_restrictions': {'key': 'properties.ipSecurityRestrictions', 'type': '[IpSecurityRestriction]'},
        'scm_ip_security_restrictions': {'key': 'properties.scmIpSecurityRestrictions', 'type': '[IpSecurityRestriction]'},
        'scm_ip_security_restrictions_use_main': {'key': 'properties.scmIpSecurityRestrictionsUseMain', 'type': 'bool'},
        'http20_enabled': {'key': 'properties.http20Enabled', 'type': 'bool'},
        'min_tls_version': {'key': 'properties.minTlsVersion', 'type': 'str'},
        'ftps_state': {'key': 'properties.ftpsState', 'type': 'str'},
        'reserved_instance_count': {'key': 'properties.reservedInstanceCount', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        """Initialize from keyword arguments.

        Unspecified settings default to ``None`` except
        ``net_framework_version`` ("v4.6"), ``local_my_sql_enabled`` (False)
        and ``http20_enabled`` (True).
        """
        super(SiteConfigResource, self).__init__(**kwargs)
        self.number_of_workers = kwargs.get('number_of_workers', None)
        self.default_documents = kwargs.get('default_documents', None)
        self.net_framework_version = kwargs.get('net_framework_version', "v4.6")
        self.php_version = kwargs.get('php_version', None)
        self.python_version = kwargs.get('python_version', None)
        self.node_version = kwargs.get('node_version', None)
        self.linux_fx_version = kwargs.get('linux_fx_version', None)
        self.windows_fx_version = kwargs.get('windows_fx_version', None)
        self.request_tracing_enabled = kwargs.get('request_tracing_enabled', None)
        self.request_tracing_expiration_time = kwargs.get('request_tracing_expiration_time', None)
        self.remote_debugging_enabled = kwargs.get('remote_debugging_enabled', None)
        self.remote_debugging_version = kwargs.get('remote_debugging_version', None)
        self.http_logging_enabled = kwargs.get('http_logging_enabled', None)
        self.logs_directory_size_limit = kwargs.get('logs_directory_size_limit', None)
        self.detailed_error_logging_enabled = kwargs.get('detailed_error_logging_enabled', None)
        self.publishing_username = kwargs.get('publishing_username', None)
        self.app_settings = kwargs.get('app_settings', None)
        self.azure_storage_accounts = kwargs.get('azure_storage_accounts', None)
        self.connection_strings = kwargs.get('connection_strings', None)
        # machine_key is read-only; always server-populated, never from kwargs.
        self.machine_key = None
        self.handler_mappings = kwargs.get('handler_mappings', None)
        self.document_root = kwargs.get('document_root', None)
        self.scm_type = kwargs.get('scm_type', None)
        self.use32_bit_worker_process = kwargs.get('use32_bit_worker_process', None)
        self.web_sockets_enabled = kwargs.get('web_sockets_enabled', None)
        self.always_on = kwargs.get('always_on', None)
        self.java_version = kwargs.get('java_version', None)
        self.java_container = kwargs.get('java_container', None)
        self.java_container_version = kwargs.get('java_container_version', None)
        self.app_command_line = kwargs.get('app_command_line', None)
        self.managed_pipeline_mode = kwargs.get('managed_pipeline_mode', None)
        self.virtual_applications = kwargs.get('virtual_applications', None)
        self.load_balancing = kwargs.get('load_balancing', None)
        self.experiments = kwargs.get('experiments', None)
        self.limits = kwargs.get('limits', None)
        self.auto_heal_enabled = kwargs.get('auto_heal_enabled', None)
        self.auto_heal_rules = kwargs.get('auto_heal_rules', None)
        self.tracing_options = kwargs.get('tracing_options', None)
        self.vnet_name = kwargs.get('vnet_name', None)
        self.cors = kwargs.get('cors', None)
        self.push = kwargs.get('push', None)
        self.api_definition = kwargs.get('api_definition', None)
        self.auto_swap_slot_name = kwargs.get('auto_swap_slot_name', None)
        self.local_my_sql_enabled = kwargs.get('local_my_sql_enabled', False)
        self.managed_service_identity_id = kwargs.get('managed_service_identity_id', None)
        self.x_managed_service_identity_id = kwargs.get('x_managed_service_identity_id', None)
        self.ip_security_restrictions = kwargs.get('ip_security_restrictions', None)
        self.scm_ip_security_restrictions = kwargs.get('scm_ip_security_restrictions', None)
        self.scm_ip_security_restrictions_use_main = kwargs.get('scm_ip_security_restrictions_use_main', None)
        self.http20_enabled = kwargs.get('http20_enabled', True)
        self.min_tls_version = kwargs.get('min_tls_version', None)
        self.ftps_state = kwargs.get('ftps_state', None)
        self.reserved_instance_count = kwargs.get('reserved_instance_count', None)
class SiteConfigurationSnapshotInfo(ProxyOnlyResource):
    """A point-in-time snapshot of a web app's configuration.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar time: The time the snapshot was taken.
    :vartype time: datetime
    :ivar snapshot_id: The id of the snapshot
    :vartype snapshot_id: int
    """

    # Every field except ``kind`` is read-only and never sent on requests.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'time': {'readonly': True},
        'snapshot_id': {'readonly': True},
    }

    # Attribute name -> wire key and serialization type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'time': {'key': 'properties.time', 'type': 'iso-8601'},
        'snapshot_id': {'key': 'properties.snapshotId', 'type': 'int'},
    }

    def __init__(self, **kwargs):
        """Create the snapshot info; server-populated fields start as None."""
        super(SiteConfigurationSnapshotInfo, self).__init__(**kwargs)
        # Both values are supplied by the service on read.
        self.time = None
        self.snapshot_id = None
class SiteExtensionInfo(ProxyOnlyResource):
    """Metadata for a site extension installed on (or available to) an app.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param extension_id: Site extension ID.
    :type extension_id: str
    :param title:
    :type title: str
    :param extension_type: Site extension type. Possible values include:
     'Gallery', 'WebRoot'
    :type extension_type: str or ~azure.mgmt.web.models.SiteExtensionType
    :param summary: Summary description.
    :type summary: str
    :param description: Detailed description.
    :type description: str
    :param version: Version information.
    :type version: str
    :param extension_url: Extension URL.
    :type extension_url: str
    :param project_url: Project URL.
    :type project_url: str
    :param icon_url: Icon URL.
    :type icon_url: str
    :param license_url: License URL.
    :type license_url: str
    :param feed_url: Feed URL.
    :type feed_url: str
    :param authors: List of authors.
    :type authors: list[str]
    :param installer_command_line_params: Installer command line parameters.
    :type installer_command_line_params: str
    :param published_date_time: Published timestamp.
    :type published_date_time: datetime
    :param download_count: Count of downloads.
    :type download_count: int
    :param local_is_latest_version: <code>true</code> if the local version is
     the latest version; <code>false</code> otherwise.
    :type local_is_latest_version: bool
    :param local_path: Local path.
    :type local_path: str
    :param installed_date_time: Installed timestamp.
    :type installed_date_time: datetime
    :param provisioning_state: Provisioning state.
    :type provisioning_state: str
    :param comment: Site Extension comment.
    :type comment: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    # NOTE(review): unlike other models, most wire keys here use snake_case
    # (e.g. 'properties.extension_id') — presumably this mirrors the service
    # schema for this resource; confirm against the REST spec before changing.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'extension_id': {'key': 'properties.extension_id', 'type': 'str'},
        'title': {'key': 'properties.title', 'type': 'str'},
        'extension_type': {'key': 'properties.extension_type', 'type': 'SiteExtensionType'},
        'summary': {'key': 'properties.summary', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'version': {'key': 'properties.version', 'type': 'str'},
        'extension_url': {'key': 'properties.extension_url', 'type': 'str'},
        'project_url': {'key': 'properties.project_url', 'type': 'str'},
        'icon_url': {'key': 'properties.icon_url', 'type': 'str'},
        'license_url': {'key': 'properties.license_url', 'type': 'str'},
        'feed_url': {'key': 'properties.feed_url', 'type': 'str'},
        'authors': {'key': 'properties.authors', 'type': '[str]'},
        'installer_command_line_params': {'key': 'properties.installer_command_line_params', 'type': 'str'},
        'published_date_time': {'key': 'properties.published_date_time', 'type': 'iso-8601'},
        'download_count': {'key': 'properties.download_count', 'type': 'int'},
        'local_is_latest_version': {'key': 'properties.local_is_latest_version', 'type': 'bool'},
        'local_path': {'key': 'properties.local_path', 'type': 'str'},
        'installed_date_time': {'key': 'properties.installed_date_time', 'type': 'iso-8601'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'comment': {'key': 'properties.comment', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Copy every writable property from kwargs, defaulting to None."""
        super(SiteExtensionInfo, self).__init__(**kwargs)
        for _attr in (
            'extension_id', 'title', 'extension_type', 'summary',
            'description', 'version', 'extension_url', 'project_url',
            'icon_url', 'license_url', 'feed_url', 'authors',
            'installer_command_line_params', 'published_date_time',
            'download_count', 'local_is_latest_version', 'local_path',
            'installed_date_time', 'provisioning_state', 'comment',
        ):
            setattr(self, _attr, kwargs.get(_attr))
class SiteInstance(ProxyOnlyResource):
    """A single running instance of an app.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar site_instance_name: Name of instance.
    :vartype site_instance_name: str
    """

    # Every field except ``kind`` is read-only.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'site_instance_name': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'site_instance_name': {'key': 'properties.siteInstanceName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Build the model; the instance name is filled in by the service."""
        super(SiteInstance, self).__init__(**kwargs)
        self.site_instance_name = None
class SiteLimits(Model):
    """Resource-usage limits that can be applied to an app.

    :param max_percentage_cpu: Maximum allowed CPU usage percentage.
    :type max_percentage_cpu: float
    :param max_memory_in_mb: Maximum allowed memory usage in MB.
    :type max_memory_in_mb: long
    :param max_disk_size_in_mb: Maximum allowed disk size usage in MB.
    :type max_disk_size_in_mb: long
    """

    _attribute_map = {
        'max_percentage_cpu': {'key': 'maxPercentageCpu', 'type': 'float'},
        'max_memory_in_mb': {'key': 'maxMemoryInMb', 'type': 'long'},
        'max_disk_size_in_mb': {'key': 'maxDiskSizeInMb', 'type': 'long'},
    }

    def __init__(self, **kwargs):
        """Read each limit from kwargs; None means the limit is not set."""
        super(SiteLimits, self).__init__(**kwargs)
        _get = kwargs.get
        self.max_percentage_cpu = _get('max_percentage_cpu')
        self.max_memory_in_mb = _get('max_memory_in_mb')
        self.max_disk_size_in_mb = _get('max_disk_size_in_mb')
class SiteLogsConfig(ProxyOnlyResource):
    """Logging configuration for an App Service app.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param application_logs: Application logs configuration.
    :type application_logs: ~azure.mgmt.web.models.ApplicationLogsConfig
    :param http_logs: HTTP logs configuration.
    :type http_logs: ~azure.mgmt.web.models.HttpLogsConfig
    :param failed_requests_tracing: Failed requests tracing configuration.
    :type failed_requests_tracing: ~azure.mgmt.web.models.EnabledConfig
    :param detailed_error_messages: Detailed error messages configuration.
    :type detailed_error_messages: ~azure.mgmt.web.models.EnabledConfig
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'application_logs': {'key': 'properties.applicationLogs', 'type': 'ApplicationLogsConfig'},
        'http_logs': {'key': 'properties.httpLogs', 'type': 'HttpLogsConfig'},
        'failed_requests_tracing': {'key': 'properties.failedRequestsTracing', 'type': 'EnabledConfig'},
        'detailed_error_messages': {'key': 'properties.detailedErrorMessages', 'type': 'EnabledConfig'},
    }

    def __init__(self, **kwargs):
        """Initialize; each log section is optional and defaults to None."""
        super(SiteLogsConfig, self).__init__(**kwargs)
        # Each section simply mirrors the matching keyword argument.
        for _section in ('application_logs', 'http_logs',
                         'failed_requests_tracing', 'detailed_error_messages'):
            setattr(self, _section, kwargs.get(_section))
class SiteMachineKey(Model):
    """MachineKey settings of an app.

    :param validation: MachineKey validation.
    :type validation: str
    :param validation_key: Validation key.
    :type validation_key: str
    :param decryption: Algorithm used for decryption.
    :type decryption: str
    :param decryption_key: Decryption key.
    :type decryption_key: str
    """

    _attribute_map = {
        'validation': {'key': 'validation', 'type': 'str'},
        'validation_key': {'key': 'validationKey', 'type': 'str'},
        'decryption': {'key': 'decryption', 'type': 'str'},
        'decryption_key': {'key': 'decryptionKey', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Read each key setting from kwargs, defaulting to None."""
        super(SiteMachineKey, self).__init__(**kwargs)
        _get = kwargs.get
        self.validation = _get('validation')
        self.validation_key = _get('validation_key')
        self.decryption = _get('decryption')
        self.decryption_key = _get('decryption_key')
class SitePatchResource(ProxyOnlyResource):
"""ARM resource for a site.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar state: Current state of the app.
:vartype state: str
:ivar host_names: Hostnames associated with the app.
:vartype host_names: list[str]
:ivar repository_site_name: Name of the repository site.
:vartype repository_site_name: str
:ivar usage_state: State indicating whether the app has exceeded its quota
usage. Read-only. Possible values include: 'Normal', 'Exceeded'
:vartype usage_state: str or ~azure.mgmt.web.models.UsageState
:param enabled: <code>true</code> if the app is enabled; otherwise,
<code>false</code>. Setting this value to false disables the app (takes
the app offline).
:type enabled: bool
:ivar enabled_host_names: Enabled hostnames for the app.Hostnames need to
be assigned (see HostNames) AND enabled. Otherwise,
the app is not served on those hostnames.
:vartype enabled_host_names: list[str]
:ivar availability_state: Management information availability state for
the app. Possible values include: 'Normal', 'Limited',
'DisasterRecoveryMode'
:vartype availability_state: str or
~azure.mgmt.web.models.SiteAvailabilityState
:param host_name_ssl_states: Hostname SSL states are used to manage the
SSL bindings for app's hostnames.
:type host_name_ssl_states: list[~azure.mgmt.web.models.HostNameSslState]
:param server_farm_id: Resource ID of the associated App Service plan,
formatted as:
"/subscriptions/{subscriptionID}/resourceGroups/{groupName}/providers/Microsoft.Web/serverfarms/{appServicePlanName}".
:type server_farm_id: str
:param reserved: <code>true</code> if reserved; otherwise,
<code>false</code>. Default value: False .
:type reserved: bool
:param is_xenon: Obsolete: Hyper-V sandbox. Default value: False .
:type is_xenon: bool
:param hyper_v: Hyper-V sandbox. Default value: False .
:type hyper_v: bool
:ivar last_modified_time_utc: Last time the app was modified, in UTC.
Read-only.
:vartype last_modified_time_utc: datetime
:param site_config: Configuration of the app.
:type site_config: ~azure.mgmt.web.models.SiteConfig
:ivar traffic_manager_host_names: Azure Traffic Manager hostnames
associated with the app. Read-only.
:vartype traffic_manager_host_names: list[str]
:param scm_site_also_stopped: <code>true</code> to stop SCM (KUDU) site
when the app is stopped; otherwise, <code>false</code>. The default is
<code>false</code>. Default value: False .
:type scm_site_also_stopped: bool
:ivar target_swap_slot: Specifies which deployment slot this app will swap
into. Read-only.
:vartype target_swap_slot: str
:param hosting_environment_profile: App Service Environment to use for the
app.
:type hosting_environment_profile:
~azure.mgmt.web.models.HostingEnvironmentProfile
:param client_affinity_enabled: <code>true</code> to enable client
affinity; <code>false</code> to stop sending session affinity cookies,
which route client requests in the same session to the same instance.
Default is <code>true</code>.
:type client_affinity_enabled: bool
:param client_cert_enabled: <code>true</code> to enable client certificate
authentication (TLS mutual authentication); otherwise, <code>false</code>.
Default is <code>false</code>.
:type client_cert_enabled: bool
:param client_cert_exclusion_paths: client certificate authentication
comma-separated exclusion paths
:type client_cert_exclusion_paths: str
:param host_names_disabled: <code>true</code> to disable the public
hostnames of the app; otherwise, <code>false</code>.
If <code>true</code>, the app is only accessible via API management
process.
:type host_names_disabled: bool
:ivar outbound_ip_addresses: List of IP addresses that the app uses for
outbound connections (e.g. database access). Includes VIPs from tenants
that site can be hosted with current settings. Read-only.
:vartype outbound_ip_addresses: str
:ivar possible_outbound_ip_addresses: List of IP addresses that the app
uses for outbound connections (e.g. database access). Includes VIPs from
all tenants. Read-only.
:vartype possible_outbound_ip_addresses: str
:param container_size: Size of the function container.
:type container_size: int
:param daily_memory_time_quota: Maximum allowed daily memory-time quota
(applicable on dynamic apps only).
:type daily_memory_time_quota: int
:ivar suspended_till: App suspended till in case memory-time quota is
exceeded.
:vartype suspended_till: datetime
:ivar max_number_of_workers: Maximum number of workers.
This only applies to Functions container.
:vartype max_number_of_workers: int
:param cloning_info: If specified during app creation, the app is cloned
from a source app.
:type cloning_info: ~azure.mgmt.web.models.CloningInfo
:ivar resource_group: Name of the resource group the app belongs to.
Read-only.
:vartype resource_group: str
:ivar is_default_container: <code>true</code> if the app is a default
container; otherwise, <code>false</code>.
:vartype is_default_container: bool
:ivar default_host_name: Default hostname of the app. Read-only.
:vartype default_host_name: str
:ivar slot_swap_status: Status of the last deployment slot swap operation.
:vartype slot_swap_status: ~azure.mgmt.web.models.SlotSwapStatus
:param https_only: HttpsOnly: configures a web site to accept only https
requests. Issues redirect for
http requests
:type https_only: bool
:param redundancy_mode: Site redundancy mode. Possible values include:
'None', 'Manual', 'Failover', 'ActiveActive', 'GeoRedundant'
:type redundancy_mode: str or ~azure.mgmt.web.models.RedundancyMode
:ivar in_progress_operation_id: Specifies an operation id if this site has
a pending operation.
:vartype in_progress_operation_id: str
:param geo_distributions: GeoDistributions for this site
:type geo_distributions: list[~azure.mgmt.web.models.GeoDistribution]
:param identity:
:type identity: ~azure.mgmt.web.models.ManagedServiceIdentity
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'state': {'readonly': True},
'host_names': {'readonly': True},
'repository_site_name': {'readonly': True},
'usage_state': {'readonly': True},
'enabled_host_names': {'readonly': True},
'availability_state': {'readonly': True},
'last_modified_time_utc': {'readonly': True},
'traffic_manager_host_names': {'readonly': True},
'target_swap_slot': {'readonly': True},
'outbound_ip_addresses': {'readonly': True},
'possible_outbound_ip_addresses': {'readonly': True},
'suspended_till': {'readonly': True},
'max_number_of_workers': {'readonly': True},
'resource_group': {'readonly': True},
'is_default_container': {'readonly': True},
'default_host_name': {'readonly': True},
'slot_swap_status': {'readonly': True},
'in_progress_operation_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'state': {'key': 'properties.state', 'type': 'str'},
'host_names': {'key': 'properties.hostNames', 'type': '[str]'},
'repository_site_name': {'key': 'properties.repositorySiteName', 'type': 'str'},
'usage_state': {'key': 'properties.usageState', 'type': 'UsageState'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'enabled_host_names': {'key': 'properties.enabledHostNames', 'type': '[str]'},
'availability_state': {'key': 'properties.availabilityState', 'type': 'SiteAvailabilityState'},
'host_name_ssl_states': {'key': 'properties.hostNameSslStates', 'type': '[HostNameSslState]'},
'server_farm_id': {'key': 'properties.serverFarmId', 'type': 'str'},
'reserved': {'key': 'properties.reserved', 'type': 'bool'},
'is_xenon': {'key': 'properties.isXenon', 'type': 'bool'},
'hyper_v': {'key': 'properties.hyperV', 'type': 'bool'},
'last_modified_time_utc': {'key': 'properties.lastModifiedTimeUtc', 'type': 'iso-8601'},
'site_config': {'key': 'properties.siteConfig', 'type': 'SiteConfig'},
'traffic_manager_host_names': {'key': 'properties.trafficManagerHostNames', 'type': '[str]'},
'scm_site_also_stopped': {'key': 'properties.scmSiteAlsoStopped', 'type': 'bool'},
'target_swap_slot': {'key': 'properties.targetSwapSlot', 'type': 'str'},
'hosting_environment_profile': {'key': 'properties.hostingEnvironmentProfile', 'type': 'HostingEnvironmentProfile'},
'client_affinity_enabled': {'key': 'properties.clientAffinityEnabled', 'type': 'bool'},
'client_cert_enabled': {'key': 'properties.clientCertEnabled', 'type': 'bool'},
'client_cert_exclusion_paths': {'key': 'properties.clientCertExclusionPaths', 'type': 'str'},
'host_names_disabled': {'key': 'properties.hostNamesDisabled', 'type': 'bool'},
'outbound_ip_addresses': {'key': 'properties.outboundIpAddresses', 'type': 'str'},
'possible_outbound_ip_addresses': {'key': 'properties.possibleOutboundIpAddresses', 'type': 'str'},
'container_size': {'key': 'properties.containerSize', 'type': 'int'},
'daily_memory_time_quota': {'key': 'properties.dailyMemoryTimeQuota', 'type': 'int'},
'suspended_till': {'key': 'properties.suspendedTill', 'type': 'iso-8601'},
'max_number_of_workers': {'key': 'properties.maxNumberOfWorkers', 'type': 'int'},
'cloning_info': {'key': 'properties.cloningInfo', 'type': 'CloningInfo'},
'resource_group': {'key': 'properties.resourceGroup', 'type': 'str'},
'is_default_container': {'key': 'properties.isDefaultContainer', 'type': 'bool'},
'default_host_name': {'key': 'properties.defaultHostName', 'type': 'str'},
'slot_swap_status': {'key': 'properties.slotSwapStatus', 'type': 'SlotSwapStatus'},
'https_only': {'key': 'properties.httpsOnly', 'type': 'bool'},
'redundancy_mode': {'key': 'properties.redundancyMode', 'type': 'RedundancyMode'},
'in_progress_operation_id': {'key': 'properties.inProgressOperationId', 'type': 'str'},
'geo_distributions': {'key': 'properties.geoDistributions', 'type': '[GeoDistribution]'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
}
def __init__(self, **kwargs):
super(SitePatchResource, self).__init__(**kwargs)
self.state = None
self.host_names = None
self.repository_site_name = None
self.usage_state = None
self.enabled = kwargs.get('enabled', None)
self.enabled_host_names = None
self.availability_state = None
self.host_name_ssl_states = kwargs.get('host_name_ssl_states', None)
self.server_farm_id = kwargs.get('server_farm_id', None)
self.reserved = kwargs.get('reserved', False)
self.is_xenon = kwargs.get('is_xenon', False)
self.hyper_v = kwargs.get('hyper_v', False)
self.last_modified_time_utc = None
self.site_config = kwargs.get('site_config', None)
self.traffic_manager_host_names = None
self.scm_site_also_stopped = kwargs.get('scm_site_also_stopped', False)
self.target_swap_slot = None
self.hosting_environment_profile = kwargs.get('hosting_environment_profile', None)
self.client_affinity_enabled = kwargs.get('client_affinity_enabled', None)
self.client_cert_enabled = kwargs.get('client_cert_enabled', None)
self.client_cert_exclusion_paths = kwargs.get('client_cert_exclusion_paths', None)
self.host_names_disabled = kwargs.get('host_names_disabled', None)
self.outbound_ip_addresses = None
self.possible_outbound_ip_addresses = None
self.container_size = kwargs.get('container_size', None)
self.daily_memory_time_quota = kwargs.get('daily_memory_time_quota', None)
self.suspended_till = None
self.max_number_of_workers = None
self.cloning_info = kwargs.get('cloning_info', None)
self.resource_group = None
self.is_default_container = None
self.default_host_name = None
self.slot_swap_status = None
self.https_only = kwargs.get('https_only', None)
self.redundancy_mode = kwargs.get('redundancy_mode', None)
self.in_progress_operation_id = None
self.geo_distributions = kwargs.get('geo_distributions', None)
self.identity = kwargs.get('identity', None)
class SitePhpErrorLogFlag(ProxyOnlyResource):
    """PHP error-logging flag settings for an app.

    ``id``, ``name`` and ``type`` are populated by the server and ignored
    on requests.

    :param kind: Kind of resource.
    :type kind: str
    :param local_log_errors: Local ``log_errors`` setting.
    :type local_log_errors: str
    :param master_log_errors: Master ``log_errors`` setting.
    :type master_log_errors: str
    :param local_log_errors_max_length: Local ``log_errors_max_len`` setting.
    :type local_log_errors_max_length: str
    :param master_log_errors_max_length: Master ``log_errors_max_len``
     setting.
    :type master_log_errors_max_length: str
    """
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'local_log_errors': {'key': 'properties.localLogErrors', 'type': 'str'},
        'master_log_errors': {'key': 'properties.masterLogErrors', 'type': 'str'},
        'local_log_errors_max_length': {'key': 'properties.localLogErrorsMaxLength', 'type': 'str'},
        'master_log_errors_max_length': {'key': 'properties.masterLogErrorsMaxLength', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Accept the four log_errors settings as optional keywords."""
        super(SitePhpErrorLogFlag, self).__init__(**kwargs)
        self.local_log_errors = kwargs.get('local_log_errors')
        self.master_log_errors = kwargs.get('master_log_errors')
        self.local_log_errors_max_length = kwargs.get('local_log_errors_max_length')
        self.master_log_errors_max_length = kwargs.get('master_log_errors_max_length')
class SiteSeal(Model):
    """An app's site seal.

    ``html`` is required when sending this model to Azure.

    :param html: Required. HTML snippet
    :type html: str
    """
    _validation = {
        'html': {'required': True},
    }
    _attribute_map = {
        'html': {'key': 'html', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Accept ``html`` as an optional keyword (validated on send)."""
        super(SiteSeal, self).__init__(**kwargs)
        self.html = kwargs.get('html')
class SiteSealRequest(Model):
    """Request body for retrieving a site seal.

    :param light_theme: Use the light color theme for the seal when
     <code>true</code>; otherwise the default theme is used.
    :type light_theme: bool
    :param locale: Locale of site seal.
    :type locale: str
    """
    _attribute_map = {
        'light_theme': {'key': 'lightTheme', 'type': 'bool'},
        'locale': {'key': 'locale', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Accept ``light_theme`` and ``locale`` as optional keywords."""
        super(SiteSealRequest, self).__init__(**kwargs)
        self.light_theme = kwargs.get('light_theme')
        self.locale = kwargs.get('locale')
class SiteSourceControl(ProxyOnlyResource):
    """Source-control configuration for an app.

    ``id``, ``name`` and ``type`` are populated by the server and ignored
    on requests.

    :param kind: Kind of resource.
    :type kind: str
    :param repo_url: Repository or source control URL.
    :type repo_url: str
    :param branch: Name of branch to use for deployment.
    :type branch: str
    :param is_manual_integration: <code>true</code> to limit to manual
     integration; <code>false</code> to enable continuous integration (which
     configures webhooks into online repos like GitHub).
    :type is_manual_integration: bool
    :param deployment_rollback_enabled: <code>true</code> to enable
     deployment rollback; otherwise, <code>false</code>.
    :type deployment_rollback_enabled: bool
    :param is_mercurial: <code>true</code> for a Mercurial repository;
     <code>false</code> for a Git repository.
    :type is_mercurial: bool
    """
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'repo_url': {'key': 'properties.repoUrl', 'type': 'str'},
        'branch': {'key': 'properties.branch', 'type': 'str'},
        'is_manual_integration': {'key': 'properties.isManualIntegration', 'type': 'bool'},
        'deployment_rollback_enabled': {'key': 'properties.deploymentRollbackEnabled', 'type': 'bool'},
        'is_mercurial': {'key': 'properties.isMercurial', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        """Accept the source-control settings as optional keywords."""
        super(SiteSourceControl, self).__init__(**kwargs)
        self.repo_url = kwargs.get('repo_url')
        self.branch = kwargs.get('branch')
        self.is_manual_integration = kwargs.get('is_manual_integration')
        self.deployment_rollback_enabled = kwargs.get('deployment_rollback_enabled')
        self.is_mercurial = kwargs.get('is_mercurial')
class SkuCapacity(Model):
    """Worker scale options for an App Service plan SKU.

    :param minimum: Minimum number of workers for this App Service plan SKU.
    :type minimum: int
    :param maximum: Maximum number of workers for this App Service plan SKU.
    :type maximum: int
    :param default: Default number of workers for this App Service plan SKU.
    :type default: int
    :param scale_type: Available scale configurations for an App Service
     plan.
    :type scale_type: str
    """
    _attribute_map = {
        'minimum': {'key': 'minimum', 'type': 'int'},
        'maximum': {'key': 'maximum', 'type': 'int'},
        'default': {'key': 'default', 'type': 'int'},
        'scale_type': {'key': 'scaleType', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Accept the scale bounds as optional keywords."""
        super(SkuCapacity, self).__init__(**kwargs)
        self.minimum = kwargs.get('minimum')
        self.maximum = kwargs.get('maximum')
        self.default = kwargs.get('default')
        self.scale_type = kwargs.get('scale_type')
class SkuDescription(Model):
    """SKU description for a scalable resource.

    :param name: Name of the resource SKU.
    :type name: str
    :param tier: Service tier of the resource SKU.
    :type tier: str
    :param size: Size specifier of the resource SKU.
    :type size: str
    :param family: Family code of the resource SKU.
    :type family: str
    :param capacity: Current number of instances assigned to the resource.
    :type capacity: int
    :param sku_capacity: Min, max, and default scale values of the SKU.
    :type sku_capacity: ~azure.mgmt.web.models.SkuCapacity
    :param locations: Locations of the SKU.
    :type locations: list[str]
    :param capabilities: Capabilities of the SKU, e.g., is traffic manager
     enabled?
    :type capabilities: list[~azure.mgmt.web.models.Capability]
    """
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'tier': {'key': 'tier', 'type': 'str'},
        'size': {'key': 'size', 'type': 'str'},
        'family': {'key': 'family', 'type': 'str'},
        'capacity': {'key': 'capacity', 'type': 'int'},
        'sku_capacity': {'key': 'skuCapacity', 'type': 'SkuCapacity'},
        'locations': {'key': 'locations', 'type': '[str]'},
        'capabilities': {'key': 'capabilities', 'type': '[Capability]'},
    }

    def __init__(self, **kwargs):
        """Accept all SKU fields as optional keywords."""
        super(SkuDescription, self).__init__(**kwargs)
        self.name = kwargs.get('name')
        self.tier = kwargs.get('tier')
        self.size = kwargs.get('size')
        self.family = kwargs.get('family')
        self.capacity = kwargs.get('capacity')
        self.sku_capacity = kwargs.get('sku_capacity')
        self.locations = kwargs.get('locations')
        self.capabilities = kwargs.get('capabilities')
class SkuInfo(Model):
    """Discovery information for a single SKU.

    :param resource_type: Resource type that this SKU applies to.
    :type resource_type: str
    :param sku: Name and tier of the SKU.
    :type sku: ~azure.mgmt.web.models.SkuDescription
    :param capacity: Min, max, and default scale values of the SKU.
    :type capacity: ~azure.mgmt.web.models.SkuCapacity
    """
    _attribute_map = {
        'resource_type': {'key': 'resourceType', 'type': 'str'},
        'sku': {'key': 'sku', 'type': 'SkuDescription'},
        'capacity': {'key': 'capacity', 'type': 'SkuCapacity'},
    }

    def __init__(self, **kwargs):
        """Accept ``resource_type``, ``sku`` and ``capacity`` keywords."""
        super(SkuInfo, self).__init__(**kwargs)
        self.resource_type = kwargs.get('resource_type')
        self.sku = kwargs.get('sku')
        self.capacity = kwargs.get('capacity')
class SkuInfos(Model):
    """A collection of SKU information for one resource type.

    :param resource_type: Resource type that this SKU applies to.
    :type resource_type: str
    :param skus: List of SKUs the subscription is able to use.
    :type skus: list[~azure.mgmt.web.models.GlobalCsmSkuDescription]
    """
    _attribute_map = {
        'resource_type': {'key': 'resourceType', 'type': 'str'},
        'skus': {'key': 'skus', 'type': '[GlobalCsmSkuDescription]'},
    }

    def __init__(self, **kwargs):
        """Accept ``resource_type`` and ``skus`` as optional keywords."""
        super(SkuInfos, self).__init__(**kwargs)
        self.resource_type = kwargs.get('resource_type')
        self.skus = kwargs.get('skus')
class SlotConfigNamesResource(ProxyOnlyResource):
    """Slot config names Azure resource.

    ``id``, ``name`` and ``type`` are populated by the server and ignored
    on requests.

    :param kind: Kind of resource.
    :type kind: str
    :param connection_string_names: List of connection string names.
    :type connection_string_names: list[str]
    :param app_setting_names: List of application settings names.
    :type app_setting_names: list[str]
    :param azure_storage_config_names: List of external Azure storage
     account identifiers.
    :type azure_storage_config_names: list[str]
    """
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'connection_string_names': {'key': 'properties.connectionStringNames', 'type': '[str]'},
        'app_setting_names': {'key': 'properties.appSettingNames', 'type': '[str]'},
        'azure_storage_config_names': {'key': 'properties.azureStorageConfigNames', 'type': '[str]'},
    }

    def __init__(self, **kwargs):
        """Accept the three name lists as optional keywords."""
        super(SlotConfigNamesResource, self).__init__(**kwargs)
        self.connection_string_names = kwargs.get('connection_string_names')
        self.app_setting_names = kwargs.get('app_setting_names')
        self.azure_storage_config_names = kwargs.get('azure_storage_config_names')
class SlotDifference(ProxyOnlyResource):
    """One setting difference between two deployment slots of an app.

    Every property below is populated by the server and ignored on
    requests.

    :param kind: Kind of resource.
    :type kind: str
    :ivar level: Level of the difference: Information, Warning or Error.
    :vartype level: str
    :ivar setting_type: The type of the setting: General, AppSetting or
     ConnectionString.
    :vartype setting_type: str
    :ivar diff_rule: Rule that describes how to process the setting
     difference during a slot swap.
    :vartype diff_rule: str
    :ivar setting_name: Name of the setting.
    :vartype setting_name: str
    :ivar value_in_current_slot: Value of the setting in the current slot.
    :vartype value_in_current_slot: str
    :ivar value_in_target_slot: Value of the setting in the target slot.
    :vartype value_in_target_slot: str
    :ivar description: Description of the setting difference.
    :vartype description: str
    """
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'level': {'readonly': True},
        'setting_type': {'readonly': True},
        'diff_rule': {'readonly': True},
        'setting_name': {'readonly': True},
        'value_in_current_slot': {'readonly': True},
        'value_in_target_slot': {'readonly': True},
        'description': {'readonly': True},
    }
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'level': {'key': 'properties.level', 'type': 'str'},
        'setting_type': {'key': 'properties.settingType', 'type': 'str'},
        'diff_rule': {'key': 'properties.diffRule', 'type': 'str'},
        'setting_name': {'key': 'properties.settingName', 'type': 'str'},
        'value_in_current_slot': {'key': 'properties.valueInCurrentSlot', 'type': 'str'},
        'value_in_target_slot': {'key': 'properties.valueInTargetSlot', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Initialize all server-populated properties to ``None``."""
        super(SlotDifference, self).__init__(**kwargs)
        for _readonly in ('level', 'setting_type', 'diff_rule',
                          'setting_name', 'value_in_current_slot',
                          'value_in_target_slot', 'description'):
            setattr(self, _readonly, None)
class SlotSwapStatus(Model):
    """Status of the last successful slot-swap operation.

    Every property below is populated by the server and ignored on
    requests.

    :ivar timestamp_utc: The time the last successful slot swap completed.
    :vartype timestamp_utc: datetime
    :ivar source_slot_name: The source slot of the last swap operation.
    :vartype source_slot_name: str
    :ivar destination_slot_name: The destination slot of the last swap
     operation.
    :vartype destination_slot_name: str
    """
    _validation = {
        'timestamp_utc': {'readonly': True},
        'source_slot_name': {'readonly': True},
        'destination_slot_name': {'readonly': True},
    }
    _attribute_map = {
        'timestamp_utc': {'key': 'timestampUtc', 'type': 'iso-8601'},
        'source_slot_name': {'key': 'sourceSlotName', 'type': 'str'},
        'destination_slot_name': {'key': 'destinationSlotName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Initialize all server-populated properties to ``None``."""
        super(SlotSwapStatus, self).__init__(**kwargs)
        self.timestamp_utc = None
        self.source_slot_name = None
        self.destination_slot_name = None
class SlowRequestsBasedTrigger(Model):
    """Auto-heal trigger based on request execution time.

    :param time_taken: Time taken.
    :type time_taken: str
    :param count: Request Count.
    :type count: int
    :param time_interval: Time interval.
    :type time_interval: str
    """
    _attribute_map = {
        'time_taken': {'key': 'timeTaken', 'type': 'str'},
        'count': {'key': 'count', 'type': 'int'},
        'time_interval': {'key': 'timeInterval', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Accept the trigger thresholds as optional keywords."""
        super(SlowRequestsBasedTrigger, self).__init__(**kwargs)
        self.time_taken = kwargs.get('time_taken')
        self.count = kwargs.get('count')
        self.time_interval = kwargs.get('time_interval')
class Snapshot(ProxyOnlyResource):
    """A point-in-time snapshot of an app.

    ``id``, ``name``, ``type`` and ``time`` are populated by the server
    and ignored on requests.

    :param kind: Kind of resource.
    :type kind: str
    :ivar time: The time the snapshot was taken.
    :vartype time: str
    """
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'time': {'readonly': True},
    }
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'time': {'key': 'properties.time', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Initialize the server-populated ``time`` property to ``None``."""
        super(Snapshot, self).__init__(**kwargs)
        self.time = None
class SnapshotRecoverySource(Model):
    """Identifies the web app that snapshot contents will be retrieved from.

    :param location: Geographical location of the source web app, e.g.
     SouthEastAsia, SouthCentralUS
    :type location: str
    :param id: ARM resource ID of the source app.
     /subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}
     for production slots and
     /subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slotName}
     for other slots.
    :type id: str
    """
    _attribute_map = {
        'location': {'key': 'location', 'type': 'str'},
        'id': {'key': 'id', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        """Accept ``location`` and ``id`` as optional keywords."""
        super(SnapshotRecoverySource, self).__init__(**kwargs)
        self.location = kwargs.get('location')
        self.id = kwargs.get('id')
class SnapshotRestoreRequest(ProxyOnlyResource):
    """Parameters for an app snapshot-restore (recovery) operation.

    ``id``, ``name`` and ``type`` are populated by the server; ``overwrite``
    is required when sending to Azure.

    :param kind: Kind of resource.
    :type kind: str
    :param snapshot_time: Point in time in which the app restore should be
     done, formatted as a DateTime string.
    :type snapshot_time: str
    :param recovery_source: Optional. Specifies the web app that snapshot
     contents will be retrieved from.
     If empty, the targeted web app will be used as the source.
    :type recovery_source: ~azure.mgmt.web.models.SnapshotRecoverySource
    :param overwrite: Required. If <code>true</code> the restore operation
     can overwrite source app; otherwise, <code>false</code>.
    :type overwrite: bool
    :param recover_configuration: If true, site configuration, in addition
     to content, will be reverted.
    :type recover_configuration: bool
    :param ignore_conflicting_host_names: If true, custom hostname conflicts
     will be ignored when recovering to a target web app.
     This setting is only necessary when RecoverConfiguration is enabled.
    :type ignore_conflicting_host_names: bool
    :param use_dr_secondary: If true, the snapshot is retrieved from
     DRSecondary endpoint.
    :type use_dr_secondary: bool
    """
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'overwrite': {'required': True},
    }
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'snapshot_time': {'key': 'properties.snapshotTime', 'type': 'str'},
        'recovery_source': {'key': 'properties.recoverySource', 'type': 'SnapshotRecoverySource'},
        'overwrite': {'key': 'properties.overwrite', 'type': 'bool'},
        'recover_configuration': {'key': 'properties.recoverConfiguration', 'type': 'bool'},
        'ignore_conflicting_host_names': {'key': 'properties.ignoreConflictingHostNames', 'type': 'bool'},
        'use_dr_secondary': {'key': 'properties.useDRSecondary', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        """Accept the restore parameters as optional keywords."""
        super(SnapshotRestoreRequest, self).__init__(**kwargs)
        self.snapshot_time = kwargs.get('snapshot_time')
        self.recovery_source = kwargs.get('recovery_source')
        self.overwrite = kwargs.get('overwrite')
        self.recover_configuration = kwargs.get('recover_configuration')
        self.ignore_conflicting_host_names = kwargs.get('ignore_conflicting_host_names')
        self.use_dr_secondary = kwargs.get('use_dr_secondary')
class Solution(Model):
    """A suggested solution for a detected problem.

    :param id: Solution Id.
    :type id: float
    :param display_name: Display Name of the solution
    :type display_name: str
    :param order: Order of the solution.
    :type order: float
    :param description: Description of the solution
    :type description: str
    :param type: Type of Solution. Possible values include:
     'QuickSolution', 'DeepInvestigation', 'BestPractices'
    :type type: str or ~azure.mgmt.web.models.SolutionType
    :param data: Solution Data.
    :type data: list[list[~azure.mgmt.web.models.NameValuePair]]
    :param metadata: Solution Metadata.
    :type metadata: list[list[~azure.mgmt.web.models.NameValuePair]]
    """
    _attribute_map = {
        'id': {'key': 'id', 'type': 'float'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'order': {'key': 'order', 'type': 'float'},
        'description': {'key': 'description', 'type': 'str'},
        'type': {'key': 'type', 'type': 'SolutionType'},
        'data': {'key': 'data', 'type': '[[NameValuePair]]'},
        'metadata': {'key': 'metadata', 'type': '[[NameValuePair]]'},
    }

    def __init__(self, **kwargs):
        """Accept all solution fields as optional keywords."""
        super(Solution, self).__init__(**kwargs)
        self.id = kwargs.get('id')
        self.display_name = kwargs.get('display_name')
        self.order = kwargs.get('order')
        self.description = kwargs.get('description')
        self.type = kwargs.get('type')
        self.data = kwargs.get('data')
        self.metadata = kwargs.get('metadata')
class SourceControl(ProxyOnlyResource):
    """OAuth token for a source-control provider.

    ``id``, ``name`` and ``type`` are populated by the server and ignored
    on requests.

    :param kind: Kind of resource.
    :type kind: str
    :param token: OAuth access token.
    :type token: str
    :param token_secret: OAuth access token secret.
    :type token_secret: str
    :param refresh_token: OAuth refresh token.
    :type refresh_token: str
    :param expiration_time: OAuth token expiration.
    :type expiration_time: datetime
    """
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'token': {'key': 'properties.token', 'type': 'str'},
        'token_secret': {'key': 'properties.tokenSecret', 'type': 'str'},
        'refresh_token': {'key': 'properties.refreshToken', 'type': 'str'},
        'expiration_time': {'key': 'properties.expirationTime', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs):
        """Accept the OAuth token fields as optional keywords."""
        super(SourceControl, self).__init__(**kwargs)
        self.token = kwargs.get('token')
        self.token_secret = kwargs.get('token_secret')
        self.refresh_token = kwargs.get('refresh_token')
        self.expiration_time = kwargs.get('expiration_time')
class StackMajorVersion(Model):
    """A major version of an application stack.

    :param display_version: Application stack major version (display only).
    :type display_version: str
    :param runtime_version: Application stack major version (runtime only).
    :type runtime_version: str
    :param is_default: <code>true</code> if this is the default major
     version; otherwise, <code>false</code>.
    :type is_default: bool
    :param minor_versions: Minor versions associated with the major version.
    :type minor_versions: list[~azure.mgmt.web.models.StackMinorVersion]
    :param application_insights: <code>true</code> if this supports
     Application Insights; otherwise, <code>false</code>.
    :type application_insights: bool
    """
    _attribute_map = {
        'display_version': {'key': 'displayVersion', 'type': 'str'},
        'runtime_version': {'key': 'runtimeVersion', 'type': 'str'},
        'is_default': {'key': 'isDefault', 'type': 'bool'},
        'minor_versions': {'key': 'minorVersions', 'type': '[StackMinorVersion]'},
        'application_insights': {'key': 'applicationInsights', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        """Accept the major-version fields as optional keywords."""
        super(StackMajorVersion, self).__init__(**kwargs)
        self.display_version = kwargs.get('display_version')
        self.runtime_version = kwargs.get('runtime_version')
        self.is_default = kwargs.get('is_default')
        self.minor_versions = kwargs.get('minor_versions')
        self.application_insights = kwargs.get('application_insights')
class StackMinorVersion(Model):
    """A minor version of an application stack.

    :param display_version: Application stack minor version (display only).
    :type display_version: str
    :param runtime_version: Application stack minor version (runtime only).
    :type runtime_version: str
    :param is_default: <code>true</code> if this is the default minor
     version; otherwise, <code>false</code>.
    :type is_default: bool
    :param is_remote_debugging_enabled: <code>true</code> if this supports
     Remote Debugging, otherwise <code>false</code>.
    :type is_remote_debugging_enabled: bool
    """
    _attribute_map = {
        'display_version': {'key': 'displayVersion', 'type': 'str'},
        'runtime_version': {'key': 'runtimeVersion', 'type': 'str'},
        'is_default': {'key': 'isDefault', 'type': 'bool'},
        'is_remote_debugging_enabled': {'key': 'isRemoteDebuggingEnabled', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        """Accept the minor-version fields as optional keywords."""
        super(StackMinorVersion, self).__init__(**kwargs)
        self.display_version = kwargs.get('display_version')
        self.runtime_version = kwargs.get('runtime_version')
        self.is_default = kwargs.get('is_default')
        self.is_remote_debugging_enabled = kwargs.get('is_remote_debugging_enabled')
class StampCapacity(Model):
    """Capacity information for a stamp.

    :param name: Name of the stamp.
    :type name: str
    :param available_capacity: Available capacity (# of machines, bytes of
     storage etc...).
    :type available_capacity: long
    :param total_capacity: Total capacity (# of machines, bytes of storage
     etc...).
    :type total_capacity: long
    :param unit: Name of the unit.
    :type unit: str
    :param compute_mode: Shared/dedicated workers. Possible values include:
     'Shared', 'Dedicated', 'Dynamic'
    :type compute_mode: str or ~azure.mgmt.web.models.ComputeModeOptions
    :param worker_size: Size of the machines. Possible values include:
     'Small', 'Medium', 'Large', 'D1', 'D2', 'D3', 'Default'
    :type worker_size: str or ~azure.mgmt.web.models.WorkerSizeOptions
    :param worker_size_id: Size ID of machines:
     0 - Small
     1 - Medium
     2 - Large
    :type worker_size_id: int
    :param exclude_from_capacity_allocation: If <code>true</code>, it
     includes basic apps.
     Basic apps are not used for capacity allocation.
    :type exclude_from_capacity_allocation: bool
    :param is_applicable_for_all_compute_modes: <code>true</code> if
     capacity is applicable for all apps; otherwise, <code>false</code>.
    :type is_applicable_for_all_compute_modes: bool
    :param site_mode: Shared or Dedicated.
    :type site_mode: str
    :param is_linux: Is this a linux stamp capacity
    :type is_linux: bool
    """
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'available_capacity': {'key': 'availableCapacity', 'type': 'long'},
        'total_capacity': {'key': 'totalCapacity', 'type': 'long'},
        'unit': {'key': 'unit', 'type': 'str'},
        'compute_mode': {'key': 'computeMode', 'type': 'ComputeModeOptions'},
        'worker_size': {'key': 'workerSize', 'type': 'WorkerSizeOptions'},
        'worker_size_id': {'key': 'workerSizeId', 'type': 'int'},
        'exclude_from_capacity_allocation': {'key': 'excludeFromCapacityAllocation', 'type': 'bool'},
        'is_applicable_for_all_compute_modes': {'key': 'isApplicableForAllComputeModes', 'type': 'bool'},
        'site_mode': {'key': 'siteMode', 'type': 'str'},
        'is_linux': {'key': 'isLinux', 'type': 'bool'},
    }

    def __init__(self, **kwargs):
        """Accept all capacity fields as optional keywords."""
        super(StampCapacity, self).__init__(**kwargs)
        # All properties are writable and default to None when absent.
        for _prop in ('name', 'available_capacity', 'total_capacity', 'unit',
                      'compute_mode', 'worker_size', 'worker_size_id',
                      'exclude_from_capacity_allocation',
                      'is_applicable_for_all_compute_modes', 'site_mode',
                      'is_linux'):
            setattr(self, _prop, kwargs.get(_prop))
class StatusCodesBasedTrigger(Model):
    """Trigger that fires based on HTTP status codes.

    :param status: HTTP status code.
    :type status: int
    :param sub_status: Request sub-status.
    :type sub_status: int
    :param win32_status: Win32 error code.
    :type win32_status: int
    :param count: Request count.
    :type count: int
    :param time_interval: Time interval.
    :type time_interval: str
    """
    _attribute_map = {
        "status": {"key": "status", "type": "int"},
        "sub_status": {"key": "subStatus", "type": "int"},
        "win32_status": {"key": "win32Status", "type": "int"},
        "count": {"key": "count", "type": "int"},
        "time_interval": {"key": "timeInterval", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Every field is optional; missing keys default to None.
        for prop in ("status", "sub_status", "win32_status", "count", "time_interval"):
            setattr(self, prop, kwargs.get(prop))
class StorageMigrationOptions(ProxyOnlyResource):
    """Options controlling a migration of app content.

    Variables are only populated by the server, and will be ignored when
    sending a request. All required parameters must be populated in order
    to send to Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param azurefiles_connection_string: Required. AzureFiles connection
     string.
    :type azurefiles_connection_string: str
    :param azurefiles_share: Required. AzureFiles share.
    :type azurefiles_share: str
    :param switch_site_after_migration: <code>true</code> if the app should
     be switched over; otherwise, <code>false</code>. Default value: False.
    :type switch_site_after_migration: bool
    :param block_write_access_to_site: <code>true</code> if the app should
     be read only during the copy operation; otherwise, <code>false</code>.
     Default value: False.
    :type block_write_access_to_site: bool
    """
    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "azurefiles_connection_string": {"required": True},
        "azurefiles_share": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "azurefiles_connection_string": {"key": "properties.azurefilesConnectionString", "type": "str"},
        "azurefiles_share": {"key": "properties.azurefilesShare", "type": "str"},
        "switch_site_after_migration": {"key": "properties.switchSiteAfterMigration", "type": "bool"},
        "block_write_access_to_site": {"key": "properties.blockWriteAccessToSite", "type": "bool"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.azurefiles_connection_string = kwargs.get("azurefiles_connection_string")
        self.azurefiles_share = kwargs.get("azurefiles_share")
        # Both migration flags default to False rather than None.
        self.switch_site_after_migration = kwargs.get("switch_site_after_migration", False)
        self.block_write_access_to_site = kwargs.get("block_write_access_to_site", False)
class StorageMigrationResponse(ProxyOnlyResource):
    """Response returned for an app-content migration request.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar operation_id: When the server starts the migration process, it
     returns an operation ID identifying that particular migration operation.
    :vartype operation_id: str
    """
    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "operation_id": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "operation_id": {"key": "properties.operationId", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only; populated by the service, never from kwargs.
        self.operation_id = None
class StringDictionary(ProxyOnlyResource):
    """String dictionary resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param properties: Settings.
    :type properties: dict[str, str]
    """
    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "properties": {"key": "properties", "type": "{str}"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.properties = kwargs.get("properties")
class SwiftVirtualNetwork(ProxyOnlyResource):
    """Swift Virtual Network contract, used to enable the Swift way of doing
    virtual network integration.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param subnet_resource_id: Resource ID of the Virtual Network subnet
     this Web App will join. The subnet must already have a delegation to
     Microsoft.Web/serverFarms defined.
    :type subnet_resource_id: str
    :param swift_supported: Whether the scale unit this Web App is on
     supports Swift integration.
    :type swift_supported: bool
    """
    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "subnet_resource_id": {"key": "properties.subnetResourceId", "type": "str"},
        "swift_supported": {"key": "properties.swiftSupported", "type": "bool"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        for prop in ("subnet_resource_id", "swift_supported"):
            setattr(self, prop, kwargs.get(prop))
class TldLegalAgreement(Model):
    """Legal agreement for a top level domain.

    All required parameters must be populated in order to send to Azure.

    :param agreement_key: Required. Unique identifier for the agreement.
    :type agreement_key: str
    :param title: Required. Agreement title.
    :type title: str
    :param content: Required. Agreement details.
    :type content: str
    :param url: URL where a copy of the agreement details is hosted.
    :type url: str
    """
    _validation = {
        "agreement_key": {"required": True},
        "title": {"required": True},
        "content": {"required": True},
    }

    _attribute_map = {
        "agreement_key": {"key": "agreementKey", "type": "str"},
        "title": {"key": "title", "type": "str"},
        "content": {"key": "content", "type": "str"},
        "url": {"key": "url", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        for prop in ("agreement_key", "title", "content", "url"):
            setattr(self, prop, kwargs.get(prop))
class TopLevelDomain(ProxyOnlyResource):
    """A top level domain object.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param privacy: <code>true</code> if the top level domain supports
     domain privacy; otherwise, <code>false</code>.
    :type privacy: bool
    """
    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "privacy": {"key": "properties.privacy", "type": "bool"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.privacy = kwargs.get("privacy")
class TopLevelDomainAgreementOption(Model):
    """Options for retrieving the list of top level domain legal agreements.

    :param include_privacy: <code>true</code> to also include agreements for
     domain privacy in the list; otherwise, <code>false</code>.
    :type include_privacy: bool
    :param for_transfer: <code>true</code> to also include agreements for
     domain transfer in the list; otherwise, <code>false</code>.
    :type for_transfer: bool
    """
    _attribute_map = {
        "include_privacy": {"key": "includePrivacy", "type": "bool"},
        "for_transfer": {"key": "forTransfer", "type": "bool"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        for prop in ("include_privacy", "for_transfer"):
            setattr(self, prop, kwargs.get(prop))
class TriggeredJobHistory(ProxyOnlyResource):
    """Triggered Web Job history: a list of triggered web job run information
    elements.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param runs: List of triggered web job runs.
    :type runs: list[~azure.mgmt.web.models.TriggeredJobRun]
    """
    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "runs": {"key": "properties.runs", "type": "[TriggeredJobRun]"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.runs = kwargs.get("runs")
class TriggeredJobRun(ProxyOnlyResource):
    """Information about a single triggered web job run.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param web_job_id: Job ID.
    :type web_job_id: str
    :param web_job_name: Job name.
    :type web_job_name: str
    :param status: Job status. Possible values include: 'Success', 'Failed',
     'Error'
    :type status: str or ~azure.mgmt.web.models.TriggeredWebJobStatus
    :param start_time: Start time.
    :type start_time: datetime
    :param end_time: End time.
    :type end_time: datetime
    :param duration: Job duration.
    :type duration: str
    :param output_url: Output URL.
    :type output_url: str
    :param error_url: Error URL.
    :type error_url: str
    :param url: Job URL.
    :type url: str
    :param job_name: Job name.
    :type job_name: str
    :param trigger: Job trigger.
    :type trigger: str
    """
    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "web_job_id": {"key": "properties.web_job_id", "type": "str"},
        "web_job_name": {"key": "properties.web_job_name", "type": "str"},
        "status": {"key": "properties.status", "type": "TriggeredWebJobStatus"},
        "start_time": {"key": "properties.start_time", "type": "iso-8601"},
        "end_time": {"key": "properties.end_time", "type": "iso-8601"},
        "duration": {"key": "properties.duration", "type": "str"},
        "output_url": {"key": "properties.output_url", "type": "str"},
        "error_url": {"key": "properties.error_url", "type": "str"},
        "url": {"key": "properties.url", "type": "str"},
        "job_name": {"key": "properties.job_name", "type": "str"},
        "trigger": {"key": "properties.trigger", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All run properties are optional; missing keys default to None.
        for prop in (
            "web_job_id",
            "web_job_name",
            "status",
            "start_time",
            "end_time",
            "duration",
            "output_url",
            "error_url",
            "url",
            "job_name",
            "trigger",
        ):
            setattr(self, prop, kwargs.get(prop))
class TriggeredWebJob(ProxyOnlyResource):
    """Information about a triggered web job.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param latest_run: Latest job run information.
    :type latest_run: ~azure.mgmt.web.models.TriggeredJobRun
    :param history_url: History URL.
    :type history_url: str
    :param scheduler_logs_url: Scheduler Logs URL.
    :type scheduler_logs_url: str
    :param run_command: Run command.
    :type run_command: str
    :param url: Job URL.
    :type url: str
    :param extra_info_url: Extra Info URL.
    :type extra_info_url: str
    :param web_job_type: Job type. Possible values include: 'Continuous',
     'Triggered'
    :type web_job_type: str or ~azure.mgmt.web.models.WebJobType
    :param error: Error information.
    :type error: str
    :param using_sdk: Using SDK?
    :type using_sdk: bool
    :param settings: Job settings.
    :type settings: dict[str, object]
    """
    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "latest_run": {"key": "properties.latest_run", "type": "TriggeredJobRun"},
        "history_url": {"key": "properties.history_url", "type": "str"},
        "scheduler_logs_url": {"key": "properties.scheduler_logs_url", "type": "str"},
        "run_command": {"key": "properties.run_command", "type": "str"},
        "url": {"key": "properties.url", "type": "str"},
        "extra_info_url": {"key": "properties.extra_info_url", "type": "str"},
        "web_job_type": {"key": "properties.web_job_type", "type": "WebJobType"},
        "error": {"key": "properties.error", "type": "str"},
        "using_sdk": {"key": "properties.using_sdk", "type": "bool"},
        "settings": {"key": "properties.settings", "type": "{object}"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # All job properties are optional; missing keys default to None.
        for prop in (
            "latest_run",
            "history_url",
            "scheduler_logs_url",
            "run_command",
            "url",
            "extra_info_url",
            "web_job_type",
            "error",
            "using_sdk",
            "settings",
        ):
            setattr(self, prop, kwargs.get(prop))
class Usage(ProxyOnlyResource):
    """Usage of the quota resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar display_name: Friendly name shown in the UI.
    :vartype display_name: str
    :ivar resource_name: Name of the quota resource.
    :vartype resource_name: str
    :ivar unit: Units of measurement for the quota resource.
    :vartype unit: str
    :ivar current_value: The current value of the resource counter.
    :vartype current_value: long
    :ivar limit: The resource limit.
    :vartype limit: long
    :ivar next_reset_time: Next reset time for the resource counter.
    :vartype next_reset_time: datetime
    :ivar compute_mode: Compute mode used for this usage. Possible values
     include: 'Shared', 'Dedicated', 'Dynamic'
    :vartype compute_mode: str or ~azure.mgmt.web.models.ComputeModeOptions
    :ivar site_mode: Site mode used for this usage.
    :vartype site_mode: str
    """
    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "display_name": {"readonly": True},
        "resource_name": {"readonly": True},
        "unit": {"readonly": True},
        "current_value": {"readonly": True},
        "limit": {"readonly": True},
        "next_reset_time": {"readonly": True},
        "compute_mode": {"readonly": True},
        "site_mode": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "display_name": {"key": "properties.displayName", "type": "str"},
        "resource_name": {"key": "properties.resourceName", "type": "str"},
        "unit": {"key": "properties.unit", "type": "str"},
        "current_value": {"key": "properties.currentValue", "type": "long"},
        "limit": {"key": "properties.limit", "type": "long"},
        "next_reset_time": {"key": "properties.nextResetTime", "type": "iso-8601"},
        "compute_mode": {"key": "properties.computeMode", "type": "ComputeModeOptions"},
        "site_mode": {"key": "properties.siteMode", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Every usage property is read-only and populated by the service.
        for prop in (
            "display_name",
            "resource_name",
            "unit",
            "current_value",
            "limit",
            "next_reset_time",
            "compute_mode",
            "site_mode",
        ):
            setattr(self, prop, None)
class User(ProxyOnlyResource):
    """User credentials used for publishing activity.

    Variables are only populated by the server, and will be ignored when
    sending a request. All required parameters must be populated in order
    to send to Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param publishing_user_name: Required. Username used for publishing.
    :type publishing_user_name: str
    :param publishing_password: Password used for publishing.
    :type publishing_password: str
    :param publishing_password_hash: Password hash used for publishing.
    :type publishing_password_hash: str
    :param publishing_password_hash_salt: Password hash salt used for
     publishing.
    :type publishing_password_hash_salt: str
    :param scm_uri: Url of SCM site.
    :type scm_uri: str
    """
    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "publishing_user_name": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "publishing_user_name": {"key": "properties.publishingUserName", "type": "str"},
        "publishing_password": {"key": "properties.publishingPassword", "type": "str"},
        "publishing_password_hash": {"key": "properties.publishingPasswordHash", "type": "str"},
        "publishing_password_hash_salt": {"key": "properties.publishingPasswordHashSalt", "type": "str"},
        "scm_uri": {"key": "properties.scmUri", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        for prop in (
            "publishing_user_name",
            "publishing_password",
            "publishing_password_hash",
            "publishing_password_hash_salt",
            "scm_uri",
        ):
            setattr(self, prop, kwargs.get(prop))
class ValidateContainerSettingsRequest(Model):
    """Request context for validating container settings.

    :param base_url: Base URL of the container registry.
    :type base_url: str
    :param username: Username to access the container registry.
    :type username: str
    :param password: Password to access the container registry.
    :type password: str
    :param repository: Repository name (image name).
    :type repository: str
    :param tag: Image tag.
    :type tag: str
    :param platform: Platform (windows or linux).
    :type platform: str
    """
    _attribute_map = {
        "base_url": {"key": "baseUrl", "type": "str"},
        "username": {"key": "username", "type": "str"},
        "password": {"key": "password", "type": "str"},
        "repository": {"key": "repository", "type": "str"},
        "tag": {"key": "tag", "type": "str"},
        "platform": {"key": "platform", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        for prop in ("base_url", "username", "password", "repository", "tag", "platform"):
            setattr(self, prop, kwargs.get(prop))
class ValidateRequest(Model):
    """Resource validation request content.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. Resource name to verify.
    :type name: str
    :param type: Required. Resource type used for verification. Possible
     values include: 'ServerFarm', 'Site'
    :type type: str or ~azure.mgmt.web.models.ValidateResourceTypes
    :param location: Required. Expected location of the resource.
    :type location: str
    :param server_farm_id: ARM resource ID of an App Service plan that would
     host the app.
    :type server_farm_id: str
    :param sku_name: Name of the target SKU for the App Service plan.
    :type sku_name: str
    :param need_linux_workers: <code>true</code> if the App Service plan is
     for Linux workers; otherwise, <code>false</code>.
    :type need_linux_workers: bool
    :param is_spot: <code>true</code> if the App Service plan is for Spot
     instances; otherwise, <code>false</code>.
    :type is_spot: bool
    :param capacity: Target capacity of the App Service plan (number of
     VMs). Must be at least 1.
    :type capacity: int
    :param hosting_environment: Name of the App Service Environment where
     the app or App Service plan should be created.
    :type hosting_environment: str
    :param is_xenon: <code>true</code> if the App Service plan is running as
     a windows container.
    :type is_xenon: bool
    """
    _validation = {
        "name": {"required": True},
        "type": {"required": True},
        "location": {"required": True},
        "capacity": {"minimum": 1},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "server_farm_id": {"key": "properties.serverFarmId", "type": "str"},
        "sku_name": {"key": "properties.skuName", "type": "str"},
        "need_linux_workers": {"key": "properties.needLinuxWorkers", "type": "bool"},
        "is_spot": {"key": "properties.isSpot", "type": "bool"},
        "capacity": {"key": "properties.capacity", "type": "int"},
        "hosting_environment": {"key": "properties.hostingEnvironment", "type": "str"},
        "is_xenon": {"key": "properties.isXenon", "type": "bool"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Required fields are enforced by _validation at serialization time,
        # not here; all attributes simply default to None when absent.
        for prop in (
            "name",
            "type",
            "location",
            "server_farm_id",
            "sku_name",
            "need_linux_workers",
            "is_spot",
            "capacity",
            "hosting_environment",
            "is_xenon",
        ):
            setattr(self, prop, kwargs.get(prop))
class ValidateResponse(Model):
    """Describes the result of resource validation.

    :param status: Result of validation.
    :type status: str
    :param error: Error details for the case when validation fails.
    :type error: ~azure.mgmt.web.models.ValidateResponseError
    """
    _attribute_map = {
        "status": {"key": "status", "type": "str"},
        "error": {"key": "error", "type": "ValidateResponseError"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.status = kwargs.get("status")
        self.error = kwargs.get("error")
class ValidateResponseError(Model):
    """Error details returned when validation fails.

    :param code: Validation error code.
    :type code: str
    :param message: Validation error message.
    :type message: str
    """
    _attribute_map = {
        "code": {"key": "code", "type": "str"},
        "message": {"key": "message", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.code = kwargs.get("code")
        self.message = kwargs.get("message")
class VirtualApplication(Model):
    """Virtual application in an app.

    :param virtual_path: Virtual path.
    :type virtual_path: str
    :param physical_path: Physical path.
    :type physical_path: str
    :param preload_enabled: <code>true</code> if preloading is enabled;
     otherwise, <code>false</code>.
    :type preload_enabled: bool
    :param virtual_directories: Virtual directories for the virtual
     application.
    :type virtual_directories: list[~azure.mgmt.web.models.VirtualDirectory]
    """
    _attribute_map = {
        "virtual_path": {"key": "virtualPath", "type": "str"},
        "physical_path": {"key": "physicalPath", "type": "str"},
        "preload_enabled": {"key": "preloadEnabled", "type": "bool"},
        "virtual_directories": {"key": "virtualDirectories", "type": "[VirtualDirectory]"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        for prop in ("virtual_path", "physical_path", "preload_enabled", "virtual_directories"):
            setattr(self, prop, kwargs.get(prop))
class VirtualDirectory(Model):
    """Directory for a virtual application.

    :param virtual_path: Path to the virtual application.
    :type virtual_path: str
    :param physical_path: Physical path.
    :type physical_path: str
    """
    _attribute_map = {
        "virtual_path": {"key": "virtualPath", "type": "str"},
        "physical_path": {"key": "physicalPath", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.virtual_path = kwargs.get("virtual_path")
        self.physical_path = kwargs.get("physical_path")
class VirtualIPMapping(Model):
    """Virtual IP mapping.

    :param virtual_ip: Virtual IP address.
    :type virtual_ip: str
    :param internal_http_port: Internal HTTP port.
    :type internal_http_port: int
    :param internal_https_port: Internal HTTPS port.
    :type internal_https_port: int
    :param in_use: Is the virtual IP mapping in use.
    :type in_use: bool
    """
    _attribute_map = {
        "virtual_ip": {"key": "virtualIP", "type": "str"},
        "internal_http_port": {"key": "internalHttpPort", "type": "int"},
        "internal_https_port": {"key": "internalHttpsPort", "type": "int"},
        "in_use": {"key": "inUse", "type": "bool"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        for prop in ("virtual_ip", "internal_http_port", "internal_https_port", "in_use"):
            setattr(self, prop, kwargs.get(prop))
class VirtualNetworkProfile(Model):
    """Specification for using a Virtual Network.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param id: Resource id of the Virtual Network.
    :type id: str
    :ivar name: Name of the Virtual Network (read-only).
    :vartype name: str
    :ivar type: Resource type of the Virtual Network (read-only).
    :vartype type: str
    :param subnet: Subnet within the Virtual Network.
    :type subnet: str
    """
    _validation = {
        "name": {"readonly": True},
        "type": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "subnet": {"key": "subnet", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.id = kwargs.get("id")
        # name and type are server-populated and therefore read-only here.
        self.name = None
        self.type = None
        self.subnet = kwargs.get("subnet")
class VnetGateway(ProxyOnlyResource):
    """The Virtual Network gateway contract, used to give the Virtual Network
    gateway access to the VPN package.

    Variables are only populated by the server, and will be ignored when
    sending a request. All required parameters must be populated in order
    to send to Azure.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param vnet_name: The Virtual Network name.
    :type vnet_name: str
    :param vpn_package_uri: Required. The URI where the VPN package can be
     downloaded.
    :type vpn_package_uri: str
    """
    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "vpn_package_uri": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "vnet_name": {"key": "properties.vnetName", "type": "str"},
        "vpn_package_uri": {"key": "properties.vpnPackageUri", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.vnet_name = kwargs.get("vnet_name")
        self.vpn_package_uri = kwargs.get("vpn_package_uri")
class VnetInfo(ProxyOnlyResource):
    """Virtual Network information contract.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource Id.
    :vartype id: str
    :ivar name: Resource Name.
    :vartype name: str
    :param kind: Kind of resource.
    :type kind: str
    :ivar type: Resource type.
    :vartype type: str
    :param vnet_resource_id: The Virtual Network's resource ID.
    :type vnet_resource_id: str
    :ivar cert_thumbprint: The client certificate thumbprint.
    :vartype cert_thumbprint: str
    :param cert_blob: A certificate file (.cer) blob containing the public
     key of the private key used to authenticate a Point-To-Site VPN
     connection.
    :type cert_blob: str
    :ivar routes: The routes that this Virtual Network connection uses.
    :vartype routes: list[~azure.mgmt.web.models.VnetRoute]
    :ivar resync_required: <code>true</code> if a resync is required;
     otherwise, <code>false</code>.
    :vartype resync_required: bool
    :param dns_servers: DNS servers to be used by this Virtual Network, as a
     comma-separated list of IP addresses.
    :type dns_servers: str
    :param is_swift: Flag that denotes whether this is VNET injection.
    :type is_swift: bool
    """
    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "cert_thumbprint": {"readonly": True},
        "routes": {"readonly": True},
        "resync_required": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "vnet_resource_id": {"key": "properties.vnetResourceId", "type": "str"},
        "cert_thumbprint": {"key": "properties.certThumbprint", "type": "str"},
        "cert_blob": {"key": "properties.certBlob", "type": "str"},
        "routes": {"key": "properties.routes", "type": "[VnetRoute]"},
        "resync_required": {"key": "properties.resyncRequired", "type": "bool"},
        "dns_servers": {"key": "properties.dnsServers", "type": "str"},
        "is_swift": {"key": "properties.isSwift", "type": "bool"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.vnet_resource_id = kwargs.get("vnet_resource_id")
        self.cert_thumbprint = None  # server-populated; read-only
        self.cert_blob = kwargs.get("cert_blob")
        self.routes = None  # server-populated; read-only
        self.resync_required = None  # server-populated; read-only
        self.dns_servers = kwargs.get("dns_servers")
        self.is_swift = kwargs.get("is_swift")
class VnetParameters(ProxyOnlyResource):
"""The required set of inputs to validate a VNET.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param vnet_resource_group: The Resource Group of the VNET to be validated
:type vnet_resource_group: str
:param vnet_name: The name of the VNET to be validated
:type vnet_name: str
:param vnet_subnet_name: The subnet name to be validated
:type vnet_subnet_name: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'vnet_resource_group': {'key': 'properties.vnetResourceGroup', 'type': 'str'},
'vnet_name': {'key': 'properties.vnetName', 'type': 'str'},
'vnet_subnet_name': {'key': 'properties.vnetSubnetName', 'type': 'str'},
}
def __init__(self, **kwargs):
super(VnetParameters, self).__init__(**kwargs)
self.vnet_resource_group = kwargs.get('vnet_resource_group', None)
self.vnet_name = kwargs.get('vnet_name', None)
self.vnet_subnet_name = kwargs.get('vnet_subnet_name', None)
class VnetRoute(ProxyOnlyResource):
"""Virtual Network route contract used to pass routing information for a
Virtual Network.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param start_address: The starting address for this route. This may also
include a CIDR notation, in which case the end address must not be
specified.
:type start_address: str
:param end_address: The ending address for this route. If the start
address is specified in CIDR notation, this must be omitted.
:type end_address: str
:param route_type: The type of route this is:
DEFAULT - By default, every app has routes to the local address ranges
specified by RFC1918
INHERITED - Routes inherited from the real Virtual Network routes
STATIC - Static route set on the app only
These values will be used for syncing an app's routes with those from a
Virtual Network. Possible values include: 'DEFAULT', 'INHERITED', 'STATIC'
:type route_type: str or ~azure.mgmt.web.models.RouteType
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'start_address': {'key': 'properties.startAddress', 'type': 'str'},
'end_address': {'key': 'properties.endAddress', 'type': 'str'},
'route_type': {'key': 'properties.routeType', 'type': 'str'},
}
def __init__(self, **kwargs):
super(VnetRoute, self).__init__(**kwargs)
self.start_address = kwargs.get('start_address', None)
self.end_address = kwargs.get('end_address', None)
self.route_type = kwargs.get('route_type', None)
class VnetValidationFailureDetails(ProxyOnlyResource):
"""A class that describes the reason for a validation failure.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param failed: A flag describing whether or not validation failed.
:type failed: bool
:param failed_tests: A list of tests that failed in the validation.
:type failed_tests: list[~azure.mgmt.web.models.VnetValidationTestFailure]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'failed': {'key': 'properties.failed', 'type': 'bool'},
'failed_tests': {'key': 'properties.failedTests', 'type': '[VnetValidationTestFailure]'},
}
def __init__(self, **kwargs):
super(VnetValidationFailureDetails, self).__init__(**kwargs)
self.failed = kwargs.get('failed', None)
self.failed_tests = kwargs.get('failed_tests', None)
class VnetValidationTestFailure(ProxyOnlyResource):
"""A class that describes a test that failed during NSG and UDR validation.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param test_name: The name of the test that failed.
:type test_name: str
:param details: The details of what caused the failure, e.g. the blocking
rule name, etc.
:type details: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'test_name': {'key': 'properties.testName', 'type': 'str'},
'details': {'key': 'properties.details', 'type': 'str'},
}
def __init__(self, **kwargs):
super(VnetValidationTestFailure, self).__init__(**kwargs)
self.test_name = kwargs.get('test_name', None)
self.details = kwargs.get('details', None)
class WebAppCollection(Model):
"""Collection of App Service apps.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.models.Site]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Site]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(self, **kwargs):
super(WebAppCollection, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = None
class WebJob(ProxyOnlyResource):
"""Web Job Information.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param run_command: Run command.
:type run_command: str
:param url: Job URL.
:type url: str
:param extra_info_url: Extra Info URL.
:type extra_info_url: str
:param web_job_type: Job type. Possible values include: 'Continuous',
'Triggered'
:type web_job_type: str or ~azure.mgmt.web.models.WebJobType
:param error: Error information.
:type error: str
:param using_sdk: Using SDK?
:type using_sdk: bool
:param settings: Job settings.
:type settings: dict[str, object]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'run_command': {'key': 'properties.run_command', 'type': 'str'},
'url': {'key': 'properties.url', 'type': 'str'},
'extra_info_url': {'key': 'properties.extra_info_url', 'type': 'str'},
'web_job_type': {'key': 'properties.web_job_type', 'type': 'WebJobType'},
'error': {'key': 'properties.error', 'type': 'str'},
'using_sdk': {'key': 'properties.using_sdk', 'type': 'bool'},
'settings': {'key': 'properties.settings', 'type': '{object}'},
}
def __init__(self, **kwargs):
super(WebJob, self).__init__(**kwargs)
self.run_command = kwargs.get('run_command', None)
self.url = kwargs.get('url', None)
self.extra_info_url = kwargs.get('extra_info_url', None)
self.web_job_type = kwargs.get('web_job_type', None)
self.error = kwargs.get('error', None)
self.using_sdk = kwargs.get('using_sdk', None)
self.settings = kwargs.get('settings', None)
class WorkerPool(Model):
"""Worker pool of an App Service Environment.
Variables are only populated by the server, and will be ignored when
sending a request.
:param worker_size_id: Worker size ID for referencing this worker pool.
:type worker_size_id: int
:param compute_mode: Shared or dedicated app hosting. Possible values
include: 'Shared', 'Dedicated', 'Dynamic'
:type compute_mode: str or ~azure.mgmt.web.models.ComputeModeOptions
:param worker_size: VM size of the worker pool instances.
:type worker_size: str
:param worker_count: Number of instances in the worker pool.
:type worker_count: int
:ivar instance_names: Names of all instances in the worker pool (read
only).
:vartype instance_names: list[str]
"""
_validation = {
'instance_names': {'readonly': True},
}
_attribute_map = {
'worker_size_id': {'key': 'workerSizeId', 'type': 'int'},
'compute_mode': {'key': 'computeMode', 'type': 'ComputeModeOptions'},
'worker_size': {'key': 'workerSize', 'type': 'str'},
'worker_count': {'key': 'workerCount', 'type': 'int'},
'instance_names': {'key': 'instanceNames', 'type': '[str]'},
}
def __init__(self, **kwargs):
super(WorkerPool, self).__init__(**kwargs)
self.worker_size_id = kwargs.get('worker_size_id', None)
self.compute_mode = kwargs.get('compute_mode', None)
self.worker_size = kwargs.get('worker_size', None)
self.worker_count = kwargs.get('worker_count', None)
self.instance_names = None
class WorkerPoolResource(ProxyOnlyResource):
"""Worker pool of an App Service Environment ARM resource.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param worker_size_id: Worker size ID for referencing this worker pool.
:type worker_size_id: int
:param compute_mode: Shared or dedicated app hosting. Possible values
include: 'Shared', 'Dedicated', 'Dynamic'
:type compute_mode: str or ~azure.mgmt.web.models.ComputeModeOptions
:param worker_size: VM size of the worker pool instances.
:type worker_size: str
:param worker_count: Number of instances in the worker pool.
:type worker_count: int
:ivar instance_names: Names of all instances in the worker pool (read
only).
:vartype instance_names: list[str]
:param sku:
:type sku: ~azure.mgmt.web.models.SkuDescription
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'instance_names': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'worker_size_id': {'key': 'properties.workerSizeId', 'type': 'int'},
'compute_mode': {'key': 'properties.computeMode', 'type': 'ComputeModeOptions'},
'worker_size': {'key': 'properties.workerSize', 'type': 'str'},
'worker_count': {'key': 'properties.workerCount', 'type': 'int'},
'instance_names': {'key': 'properties.instanceNames', 'type': '[str]'},
'sku': {'key': 'sku', 'type': 'SkuDescription'},
}
def __init__(self, **kwargs):
super(WorkerPoolResource, self).__init__(**kwargs)
self.worker_size_id = kwargs.get('worker_size_id', None)
self.compute_mode = kwargs.get('compute_mode', None)
self.worker_size = kwargs.get('worker_size', None)
self.worker_count = kwargs.get('worker_count', None)
self.instance_names = None
self.sku = kwargs.get('sku', None)
| [
"zikalino@microsoft.com"
] | zikalino@microsoft.com |
77a963659c22bc78b97bc981518926205e92bd30 | bcb8337b488f6acb91b700a2312a5b2018855413 | /federatedml/optim/test/updater_test.py | f2d3e05532916611ca665287568914e038b999cc | [
"Apache-2.0"
] | permissive | freeeedooom/FATE | d17a4729f059cfec6bc07c3142bebcd3b470dc3c | 7bbce8ee037543f280791378681742b40a300b0a | refs/heads/master | 2020-08-19T12:15:22.517131 | 2019-10-17T07:09:22 | 2019-10-17T07:09:22 | 215,918,890 | 1 | 0 | Apache-2.0 | 2019-10-18T01:45:43 | 2019-10-18T01:45:43 | null | UTF-8 | Python | false | false | 2,113 | py | #
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# from arch.api import eggroll
from federatedml.optim import L1Updater
from federatedml.optim import L2Updater
import numpy as np
import unittest
class TestUpdater(unittest.TestCase):
def setUp(self):
alpha = 0.5
learning_rate = 0.1
self.l1_updater = L1Updater(alpha, learning_rate)
self.l2_updater = L2Updater(alpha, learning_rate)
self.coef_ = np.array([1, -2, 3, -4, 5, -6, 7, -8, 9])
self.gradient = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1])
# l2 regular
self.l2_loss_norm = 0.5 * alpha * np.sum(np.array([i * i for i in self.coef_]))
self.l2_update_coef = self.coef_ - self.gradient - learning_rate * alpha * self.coef_
# l1 regular
self.l1_loss_norm = 22.5
self.l1_update_coef = [0, -2.95, 1.95, -4.95, 3.95, -6.95, 5.95, -8.95, 7.95]
def test_l2updater(self):
loss_norm = self.l2_updater.loss_norm(self.coef_)
self.assertEqual(loss_norm, self.l2_loss_norm)
l2_update_coef = self.l2_updater.update_coef(self.coef_, self.gradient)
self.assertListEqual(list(l2_update_coef), list(self.l2_update_coef))
def test_l1updater(self):
loss_norm = self.l1_updater.loss_norm(self.coef_)
self.assertEqual(loss_norm, self.l1_loss_norm)
l1_update_coef = self.l1_updater.update_coef(self.coef_, self.gradient)
self.assertListEqual(list(l1_update_coef), list(self.l1_update_coef))
if __name__ == "__main__":
unittest.main()
| [
"jicezeng@gmail.com"
] | jicezeng@gmail.com |
5904a77dcaa3f8739c2f3bf5900512b189487013 | 81f2d4aa3bfb216e04efec81c7f614603a8fd384 | /irekua/selia/urls/collections_admin.py | af1faca4aa8db97339d9dd03b7f487e21ecce3c1 | [] | no_license | CONABIO-audio/irekua | 44564020c342e8bd49a14707f206962869bc026d | 4531a6dbb8b0a0014567930a134bc4399c2c00d4 | refs/heads/master | 2022-12-10T09:43:05.866848 | 2019-10-17T16:18:21 | 2019-10-17T16:18:21 | 170,434,169 | 0 | 1 | null | 2022-12-08T01:44:04 | 2019-02-13T03:32:55 | Python | UTF-8 | Python | false | false | 350 | py | from django.urls import path
from selia.views import collections_admin
urlpatterns = [
path(
'manage/',
collections_admin.Home.as_view(),
name='collections_manager_home'),
path(
'manage/managed_collections/',
collections_admin.ManagedCollectionsView.as_view(),
name='managed_collections'),
]
| [
"santiago.mbal@gmail.com"
] | santiago.mbal@gmail.com |
dd8d18efbc81be9cc56316bfb9bd50f9618a5a05 | b5826ee957641a233e5b100e19e0fbdc0e8a0f6e | /fundingharvest/dao.py | 7639f4eec0c7ef95771ab4c4868e043984d9b3b0 | [] | no_license | emanuil-tolev/fundingharvest | 1f610534cd7e8e7a64a65489c588d5d968559d78 | 00b49f708605d1f59eac5b5e21c55cd25e15da17 | refs/heads/master | 2016-09-11T02:09:45.387114 | 2014-04-29T15:28:35 | 2014-04-29T15:28:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,654 | py | import json
import uuid
import UserDict
import httplib
from datetime import datetime
import pyes
from fundingharvest.config import config
def init_db():
conn, db = get_conn()
try:
conn.create_index(db)
except pyes.exceptions.IndexAlreadyExistsException:
pass
def get_conn():
host = "127.0.0.1:9200"
db_name = "fundfind"
# host = config["ELASTIC_SEARCH_HOST"]
# db_name = config["ELASTIC_SEARCH_DB"]
# print host, db_name
conn = pyes.ES([host])
return conn, db_name
class DomainObject(UserDict.IterableUserDict):
# set __type__ on inheriting class to determine elasticsearch object
__type__ = None
def __init__(self, **kwargs):
'''Initialize a domain object with key/value pairs of attributes.
'''
# IterableUserDict expects internal dictionary to be on data attribute
self.data = dict(kwargs)
@property
def id(self):
'''Get id of this object.'''
return self.data.get('id', None)
def save(self):
'''Save to backend storage.'''
# TODO: refresh object with result of save
if 'modified' in self.data:
self.data['modified'] = datetime.now().isoformat()
return self.upsert(self.data)
@classmethod
def pull(cls, id_):
'''Retrieve object by id.'''
conn, db = get_conn()
out = conn.get(db, cls.__type__, id_)
return cls(**out['_source'])
@classmethod
def delete(cls, id_):
'''Delete object by id.'''
conn, db = get_conn()
out = conn.delete(db, cls.__type__, id_)
return cls(out['_source']['ok'])
@classmethod
def upsert(cls, data):
'''Update backend object with a dictionary of data.
If no id is supplied an uuid id will be created before saving.'''
conn, db = get_conn()
if 'id' in data:
id_ = data['id']
else:
id_ = uuid.uuid4().hex
data['id'] = id_
if 'created' not in data and 'modified' not in data:
data['created'] = datetime.now().isoformat()
data['modified'] = datetime.now().isoformat()
conn.index(data, db, cls.__type__, id_)
return cls(**data)
@classmethod
def delete_by_query(cls, query):
url = "127.0.0.1:9200"
loc = fundfind + "/" + cls.__type__ + "/_query?q=" + query
conn = httplib.HTTPConnection(url)
conn.request('DELETE', loc)
resp = conn.getresponse()
return resp.read()
@classmethod
def query(cls, q='', terms=None, facet_fields=None, flt=False, **kwargs):
'''Perform a query on backend.
:param q: maps to query_string parameter.
:param terms: dictionary of terms to filter on. values should be lists.
:param facet_fields: we need a proper comment on this TODO
:param kwargs: any keyword args as per
http://www.elasticsearch.org/guide/reference/api/search/uri-request.html
'''
conn, db = get_conn()
if not q:
ourq = pyes.query.MatchAllQuery()
else:
if flt:
ourq = pyes.query.FuzzyLikeThisQuery(like_text=q,**kwargs)
else:
ourq = pyes.query.StringQuery(q, default_operator='AND')
if terms:
for term in terms:
for val in terms[term]:
termq = pyes.query.TermQuery(term, val)
ourq = pyes.query.BoolQuery(must=[ourq,termq])
ourq = ourq.search(**kwargs)
if facet_fields:
for item in facet_fields:
ourq.facet.add_term_facet(item['key'], size=item.get('size',100), order=item.get('order',"count"))
out = conn.search(ourq, db, cls.__type__)
return out
@classmethod
def raw_query(self, query_string):
if not query_string:
msg = json.dumps({
'error': "Query endpoint. Please provide elastic search query parameters - see http://www.elasticsearch.org/guide/reference/api/search/uri-request.html"
})
return msg
host = "127.0.0.1:9200"
db_path = "fundfind"
fullpath = '/' + db_path + '/' + self.__type__ + '/_search' + '?' + query_string
c = httplib.HTTPConnection(host)
c.request('GET', fullpath)
result = c.getresponse()
# pass through the result raw
return result.read()
class Funder(DomainObject):
__type__ = 'funder'
class FundingOpp(DomainObject):
__type__ = 'funding_opportunity' | [
"emanuil.tolev@gmail.com"
] | emanuil.tolev@gmail.com |
d28c1620b4795e92a88c32d852a82433a3acf006 | 711c11d0111a40055ba110e7089a231c2ba42b8e | /toontown/ai/ServiceStart.py | 113573dab399c4070258ce5574e020998d3f0c1c | [
"Apache-2.0"
] | permissive | DeadMemez/ProjectAltis-OldAcornAcres | 03c8dc912ecccae8456d89790f6b332547b75cc3 | e8e0087389933795973e566782affcaec65a2980 | refs/heads/master | 2021-01-19T13:59:07.234192 | 2017-08-20T14:41:45 | 2017-08-20T14:41:45 | 100,869,782 | 0 | 2 | null | 2017-08-20T15:14:35 | 2017-08-20T15:14:35 | null | UTF-8 | Python | false | false | 2,621 | py | import __builtin__
# Mark this process as the AI server before any engine code runs.
__builtin__.process = 'ai'

# Temporary hack patch:
# Dump every pandac.PandaModules name into builtins so legacy code that
# expects those globals (loadPrcFile, config, run, ...) keeps working.
__builtin__.__dict__.update(__import__('pandac.PandaModules', fromlist=['*']).__dict__)
from direct.extensions_native import HTTPChannel_extensions
from toontown.toonbase import ToonPythonUtil as PythonUtil

import argparse

# Each command-line option maps onto a PRC config variable further below.
parser = argparse.ArgumentParser()
parser.add_argument('--base-channel', help='The base channel that the server may use.')
parser.add_argument('--max-channels', help='The number of channels the server may use.')
parser.add_argument('--stateserver', help="The control channel of this AI's designated State Server.")
parser.add_argument('--district-name', help="What this AI Server's district will be named.")
parser.add_argument('--astron-ip', help="The IP address of the Astron Message Director to connect to.")
parser.add_argument('--eventlogger-ip', help="The IP address of the Astron Event Logger to log to.")
parser.add_argument('--start-time', help="The time of day to start at")
parser.add_argument('config', nargs='*', default=['config/general.prc', 'config/release/dev.prc'], help="PRC file(s) to load.")
args = parser.parse_args()

# Load the PRC files first so that the command-line overrides below win.
for prc in args.config:
    loadPrcFile(prc)

# Translate CLI arguments into PRC entries; only options actually given on
# the command line are emitted.
localconfig = ''
if args.base_channel: localconfig += 'air-base-channel %s\n' % args.base_channel
if args.max_channels: localconfig += 'air-channel-allocation %s\n' % args.max_channels
if args.stateserver: localconfig += 'air-stateserver %s\n' % args.stateserver
if args.district_name: localconfig += 'district-name %s\n' % args.district_name
if args.astron_ip: localconfig += 'air-connect %s\n' % args.astron_ip
if args.eventlogger_ip: localconfig += 'eventlog-host %s\n' % args.eventlogger_ip
if args.start_time: localconfig += 'start-time %s\n' % args.start_time
loadPrcFileData('Command-line', localconfig)

from otp.ai.AIBaseGlobal import *
from toontown.ai.ToontownAIRepository import ToontownAIRepository

# Build the AI repository from config (falling back to the defaults shown).
simbase.air = ToontownAIRepository(config.GetInt('air-base-channel', 401000000),
                                   config.GetInt('air-stateserver', 4002),
                                   config.GetString('district-name', 'Devhaven'),
                                   config.GetInt('start-time', 6))

# 'host[:port]' -- split an optional port suffix off the Astron address.
host = config.GetString('air-connect', '127.0.0.1')
port = 7100
if ':' in host:
    host, port = host.split(':', 1)
    port = int(port)
simbase.air.connect(host, port)

try:
    run()
except SystemExit:
    raise
except Exception:
    # Record unexpected crashes with the event logger before propagating.
    info = PythonUtil.describeException()
    simbase.air.writeServerEvent('ai-exception', simbase.air.getAvatarIdFromSender(), simbase.air.getAccountIdFromSender(), info)
    raise
| [
"tewtow5@gmail.com"
] | tewtow5@gmail.com |
d1721732a4cb0290b1e1c779d98ffbd556b00350 | c24ad19b65992dd2be3d3210b889d970e43b9cdc | /class/phase2/RE/exercise.py | ea6935386b44c7c9c4ee02508bb9a92003d42f16 | [] | no_license | ivoryli/myproject | 23f39449a0bd23abcc3058c08149cebbfd787d12 | cebfa2594198060d3f8f439c971864e0639bbf7e | refs/heads/master | 2020-05-30T06:25:41.345645 | 2019-05-31T11:15:55 | 2019-05-31T11:15:55 | 189,578,491 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,634 | py | import sys
import re
# port = sys.argv[1]
# fd = open("1.txt")
# while True:
# data = ''
# for line in fd:
# if line != '\n':
# data += line
# else:
# break
# # print(">>>s>>>>",data)
#
# #匹配字符串首个单词
# key_word = re.match(r'\S+',data).group()
# if key_word == port:
# #匹配目标内容
# pattern = r"[0-9a-f]{4}\.[0-9a-f]{4}\.[0-9a-f]{4}"
# pattern1 = r"(\d{1,3}\.){3}\d{1,3}/\d+|Unknow"
# try:
# address = re.search(pattern,data).group()
# print(address)
# address1 = re.search(pattern1,data).group()
# print(address1)
# except:
# print("No address")
# break
# if not data:
# print("Not Found the %s"%port)
# break
# myself
# cmd = sys.argv[1]
# fd = open("./RE/1.txt")
# s = ''
# while True:
# data = fd.read(1024)
# if not data:
# break
# s += data
#
# fd.close()
# L = s.split("\n")
# # print(L)
# flag = False
# for item in L:
# if not flag:
# start = re.findall("^\S+",item)
# # print(start[0])
# try:
# if start[0] == cmd:
# flag = True
# except Exception:
# continue
# if flag:
# # print(item)
# #findall里不能用(),代表第几项
# # address = re.findall(r"(\d{1,3}\.){3}\d{1,3}/\d+",item)
# address = re.search(r"(\d{1,3}\.){3}\d{1,3}/\d+",item)
# if address:
# address = address.group()
# if address:
# print(address)
# break
| [
"2712455490@qq.com"
] | 2712455490@qq.com |
1fa00ffbfd674b2cd55fb92276e5f8116a643b10 | c26332e2b12888069efe3664999a314b3bd56d7d | /backend/mobile_122_dev_5929/wsgi.py | 6b6ae74d42b50e58a4ec368459dab5d82879d108 | [] | no_license | crowdbotics-apps/mobile-122-dev-5929 | b7dee33e9c4492567601113440d541e2e101a552 | f51b63024c7c5bbfa7a4d99626c2019ac8408bf3 | refs/heads/master | 2022-10-11T09:56:45.538625 | 2020-06-12T05:25:16 | 2020-06-12T05:25:16 | 271,715,500 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py | """
WSGI config for mobile_122_dev_5929 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Tell Django which settings module to use before the WSGI app is created.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mobile_122_dev_5929.settings')
# Module-level ``application`` is the callable WSGI servers look up.
application = get_wsgi_application()
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
2c0191293e8c452ed7859c3cde2961e80e81d050 | 030cea4006a4ff559f23cb3b3c31cd038ed2e332 | /week8/informatics/array_f.py | 0231e876ee6b041411b446562543e4a28a75e84b | [] | no_license | ayananygmetova/Web-Dev-2020 | f8834e0ee26f0f0f06d0e3a282c73b373954a430 | 957bca91554f015e9a3d13b4ec12e64de7ac633e | refs/heads/master | 2023-01-22T16:49:39.857983 | 2020-03-31T10:09:54 | 2020-03-31T10:09:54 | 236,937,810 | 1 | 0 | null | 2023-01-07T16:34:35 | 2020-01-29T08:41:10 | Python | UTF-8 | Python | false | false | 380 | py | <<<<<<< HEAD
# NOTE(review): this block contained an unresolved git merge conflict in
# which both branches were byte-identical; the duplicate copy and the
# conflict markers have been removed.

# Count "local maxima": elements strictly greater than both neighbours.
n = int(input())
nums = [int(i) for i in input().split()]
cnt = 0
for i in range(1, n - 1):
    if nums[i] > nums[i - 1] and nums[i] > nums[i + 1]:
        cnt += 1
print(cnt)
"ayananyfmetova@gmail.com"
] | ayananyfmetova@gmail.com |
03b2c7c933d5a7132ca19ec39915b583600ab8e9 | f13acd0d707ea9ab0d2f2f010717b35adcee142f | /ABC/abc201-abc250/abc249/a/main.py | 865b1e51de55bf7b935c7adf0cc1a54134c2cbb1 | [
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] | permissive | KATO-Hiro/AtCoder | 126b9fe89fa3a7cffcbd1c29d42394e7d02fa7c7 | bf43320bc1af606bfbd23c610b3432cddd1806b9 | refs/heads/master | 2023-08-18T20:06:42.876863 | 2023-08-17T23:45:21 | 2023-08-17T23:45:21 | 121,067,516 | 4 | 0 | CC0-1.0 | 2023-09-14T21:59:38 | 2018-02-11T00:32:45 | Python | UTF-8 | Python | false | false | 508 | py | # -*- coding: utf-8 -*-
def main():
    """ABC249-A: report whose train ('Takahashi' / 'Aoki' / 'Draw') has
    travelled further after x seconds, given each one's move/rest cycle."""
    import sys
    input = sys.stdin.readline

    a, b, c, d, e, f, x = map(int, input().split())

    def travelled(move_secs, speed, rest_secs):
        # A full cycle lasts move_secs + rest_secs and covers
        # move_secs * speed; the leftover time moves for at most move_secs.
        cycles, leftover = divmod(x, move_secs + rest_secs)
        return speed * (move_secs * cycles + min(leftover, move_secs))

    takahashi = travelled(a, b, c)
    aoki = travelled(d, e, f)

    if takahashi > aoki:
        print('Takahashi')
    elif takahashi < aoki:
        print('Aoki')
    else:
        print('Draw')


if __name__ == "__main__":
    main()
| [
"k.hiro1818@gmail.com"
] | k.hiro1818@gmail.com |
c0467c1f1792cf8edb3adc594aa6a34ac8af51cf | 0ed63027e39120c8f0ca355aa5abbeaee42104f4 | /dex_api_python/models/post_deposit_body.py | dd59b63647579ac1cbcf29c748c3ce392528a6e6 | [] | no_license | Felix-Fenix/dex-api-python | 592f4737936fbc1968e7c3725bf2ecfda607ff9a | bb5a2d86d476ab3c080383ad5197ed5f116712ed | refs/heads/master | 2022-08-24T16:22:28.795164 | 2020-05-26T07:11:34 | 2020-05-26T07:11:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,475 | py | # coding: utf-8
"""
CET-Lite for CoinEx Chain
A REST interface for state queries, transaction generation and broadcasting. # noqa: E501
OpenAPI spec version: 3.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class PostDepositBody(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'base_req': 'BaseReq',
'depositor': 'Address',
'amount': 'list[Coin]'
}
attribute_map = {
'base_req': 'base_req',
'depositor': 'depositor',
'amount': 'amount'
}
def __init__(self, base_req=None, depositor=None, amount=None): # noqa: E501
"""PostDepositBody - a model defined in Swagger""" # noqa: E501
self._base_req = None
self._depositor = None
self._amount = None
self.discriminator = None
if base_req is not None:
self.base_req = base_req
if depositor is not None:
self.depositor = depositor
if amount is not None:
self.amount = amount
@property
def base_req(self):
"""Gets the base_req of this PostDepositBody. # noqa: E501
:return: The base_req of this PostDepositBody. # noqa: E501
:rtype: BaseReq
"""
return self._base_req
@base_req.setter
def base_req(self, base_req):
"""Sets the base_req of this PostDepositBody.
:param base_req: The base_req of this PostDepositBody. # noqa: E501
:type: BaseReq
"""
self._base_req = base_req
@property
def depositor(self):
"""Gets the depositor of this PostDepositBody. # noqa: E501
:return: The depositor of this PostDepositBody. # noqa: E501
:rtype: Address
"""
return self._depositor
@depositor.setter
def depositor(self, depositor):
"""Sets the depositor of this PostDepositBody.
:param depositor: The depositor of this PostDepositBody. # noqa: E501
:type: Address
"""
self._depositor = depositor
@property
def amount(self):
"""Gets the amount of this PostDepositBody. # noqa: E501
:return: The amount of this PostDepositBody. # noqa: E501
:rtype: list[Coin]
"""
return self._amount
@amount.setter
def amount(self, amount):
"""Sets the amount of this PostDepositBody.
:param amount: The amount of this PostDepositBody. # noqa: E501
:type: list[Coin]
"""
self._amount = amount
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(PostDepositBody, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, PostDepositBody):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
    """Returns true if both objects are not equal."""
    # Delegates to __eq__ so the two stay consistent (required on Python 2,
    # where __ne__ is not derived automatically).
    return not self == other
| [
"huahui.gao@matrixport.com"
] | huahui.gao@matrixport.com |
722f1d36546973f8c17497415915f509abc74020 | e5202e0f36c15b8898920a461a866168fa059947 | /clirad/co2_0.0004/band_11/atmpro_mls/cliradlw_523cbb7/param.py | a462e32e4dffdd5644dd1c546871f04e88f215f5 | [] | no_license | qAp/analysis_-_new_kdist_param | 653c9873751646f6fa9481544e98ed6065a16155 | 272dc3667030cdb18664108d0bd78fee03736144 | refs/heads/master | 2021-06-11T04:21:35.105924 | 2019-08-04T13:13:07 | 2019-08-04T13:13:07 | 136,108,828 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 565 | py | PARAM = {'commitnumber': '523cbb7', 'molecule': {'co2': 0.0004}, 'band': [11], 'atmpro': 'mls', 'tsfc': 294}
PARAM_LBLNEW = {'atmpro': 'mls', 'band': '9', 'commitnumber': 'a22ab94', 'conc': 0.0004, 'dv': 0.001, 'klin': 6.5e-24, 'molecule': 'co2', 'ng_adju': [0, 0], 'ng_refs': [3, 3], 'nv': 1000, 'option_compute_btable': 0, 'option_compute_ktable': 0, 'option_wgt_flux': 1, 'option_wgt_k': 1, 'ref_pts': [(1, 250), (50, 250)], 'tsfc': 294, 'vmax': 3000, 'vmin': 1900, 'w_diffuse': [(1.66, 1.66, 1.75), (1.75, 1.6, 1.85)], 'wgt': [(0.7, 0.8, 0.7), (0.8, 0.7, 0.8)]} | [
"llacque@gmail.com"
] | llacque@gmail.com |
fa3aa2d7d1b50914f148e89a4638fb89c4c58898 | ebbaa59b07f170e7fcff3f97c1ac17d9c8f70dbf | /cars_server/settings.py | 39000cbcfaf46dc699f4c8adf566bc0ec155a75c | [] | no_license | sudhanshuchopra/cars_server | fe22e89a7d08c8d34da3d86e18d73e5f1e7c6368 | 40fe65debb10745bbe6ba1ca9a58576983f47caa | refs/heads/master | 2021-01-12T04:19:29.460116 | 2016-12-29T10:55:19 | 2016-12-29T10:55:19 | 77,587,476 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,234 | py | """
Django settings for cars_server project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = 'o7vmpy9jt8dqfy#x867=rjd&)!c3vi-hef*a0al41gqnlq8o5w'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# NOTE(review): must list the served hostnames before deploying with
# DEBUG = False, otherwise Django rejects every request.
ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'cars'  # project-local app
]

MIDDLEWARE_CLASSES = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'cars_server.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # Project-level template directory in addition to per-app templates.
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'cars_server.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
# Development default: file-based SQLite next to the project root.

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'Asia/Kolkata'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/

STATIC_URL = '/static/'
| [
"sudhanshuccp@gmail.com"
] | sudhanshuccp@gmail.com |
e531c93400227515a0cff8a32d5fd493da27819d | 3327a87cefa2275bd0ba90a500444f3494b14fdf | /bwu/array/229-majority-element-ii.py | 97f32d59b609136a9b16159f5b89aaa2dc146d20 | [] | no_license | captainhcg/leetcode-in-py-and-go | e1b56f4228e0d60feff8f36eb3d457052a0c8d61 | 88a822c48ef50187507d0f75ce65ecc39e849839 | refs/heads/master | 2021-06-09T07:27:20.358074 | 2017-01-07T00:23:10 | 2017-01-07T00:23:10 | 61,697,502 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 685 | py | class Solution(object):
def majorityElement(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
"""
if not nums:
return []
count1, count2, cand1, cand2 = 0, 0, 0, 1
for n in nums:
if n == cand1:
count1 += 1
elif n == cand2:
count2 += 1
elif count1 == 0:
cand1, count1 = n, 1
elif count2 == 0:
cand2, count2 = n, 1
else:
count1 -= 1
count2 -= 1
return [i for i in (cand1, cand2) if nums.count(i) > len(nums) // 3]
| [
"noreply@github.com"
] | captainhcg.noreply@github.com |
e2cc5bec1c205a1ac7954da2c42e2c572adc1402 | 6fc061385b59852d86a6d00a7c1ae9136d072d11 | /localpubsub/localpubsub.py | 78ee2d6c65d2f8dabba58969685173485f7053d8 | [
"MIT"
] | permissive | SimLeek/localpubsubs | d07c9d31be63761fd70ba37e6acb5ba36dfa53ed | 2e2351e4047ace6f68ce2217fe2222f1f18aec09 | refs/heads/master | 2020-04-18T01:28:13.496094 | 2019-03-10T23:59:31 | 2019-03-10T23:59:31 | 167,119,863 | 0 | 0 | MIT | 2019-03-10T23:59:32 | 2019-01-23T04:57:06 | Python | UTF-8 | Python | false | false | 2,216 | py | from threading import Lock
# Import typing only for static analyzers: the `if False` guard keeps the
# import off the runtime path (Python 2-compatible TYPE_CHECKING idiom,
# matching the `# type:` comments used below).
if False:
    from typing import Dict
class NoData(object):
    """Sentinel type whose instances signal "no fresh value was available"."""
    pass
def _args_for_lock(blocking, timeout):
if blocking:
args = (blocking, timeout)
else:
args = (blocking,)
return args
class VariableSub(object):
    """Subscriber handle created by ``VariablePub.make_sub()``.

    The internal lock starts out acquired; the publisher releases it on
    each publish, which is how :meth:`get` distinguishes "fresh data" from
    "nothing new since the last read".
    """

    def __init__(self, pub):
        self.pub = pub
        self.lock = Lock()
        # Sentinel handed back by get() when no new data arrived in time.
        self.return_on_no_data = NoData()
        self.lock.acquire()
        # Key under which the publisher tracks this subscriber.
        self.name = hash(self)

    def get(self, blocking=False, timeout=0.0):
        """Return the latest published value, or the no-data sentinel.

        :param blocking: wait for the next publish instead of failing fast.
        :param timeout: seconds to wait when blocking.  NOTE(review): a 0.0
            timeout makes even a blocking acquire return almost immediately
            (threading.Lock semantics) — confirm the intended default.
        """
        if self.lock.acquire(*_args_for_lock(blocking, timeout)):
            data = self.pub.get_data(blocking, timeout)
        else:
            data = self.return_on_no_data
        return data

    def release(self):
        """Detach from the publisher and drop the lock; idempotent."""
        # Deregister if the publisher still knows about us (pop with a
        # default replaces the previous del/KeyError-swallowing dance).
        self.pub.subs.pop(self.name, None)
        try:
            self.lock.release()
        except RuntimeError:
            # Lock was not held — release() may run twice (explicitly and
            # again from __del__ / __exit__).
            pass

    def __del__(self):
        self.release()

    def __enter__(self):
        # BUGFIX: __exit__ existed without __enter__, so using a subscriber
        # in a `with` block raised AttributeError before reaching cleanup.
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.release()

    def __aexit__(self, exc_type, exc_val, exc_tb):
        # NOTE(review): true `async with` support needs an `async def`
        # __aexit__ plus an __aenter__; kept synchronous here to preserve
        # the Python 2 compatibility implied by this module's type comments.
        self.release()
class VariablePub(object):
    """Single-slot publisher: holds the latest value and wakes subscribers.

    Writes are serialized by a private lock; each publish releases every
    subscriber's lock so their next ``get()`` sees the new value.
    """

    def __init__(self):
        self.subs = {}  # type: Dict[VariableSub]
        self.__data = None
        self.__write_lock = Lock()

    def make_sub(self):  # type: ()->VariableSub
        """Create, register and return a new subscriber for this publisher."""
        sub = VariableSub(self)
        self.subs[sub.name] = sub
        return sub

    def publish(self, data, blocking=True, timeout=0.0, force_all_read=False):
        """Store *data* and release every subscriber's lock.

        NOTE(review): ``force_all_read`` is accepted but never used in this
        implementation — confirm whether it is dead API or unfinished.
        """
        self.__set_data(data, blocking, timeout)
        for sub in self.subs.values():
            try:
                sub.lock.release()
            except RuntimeError:
                # Subscriber had not consumed the previous value yet; its
                # lock was already released, so there is nothing to do.
                pass

    def get_data(self, blocking=False, timeout=0.0):
        """Return the current value, optionally waiting out an in-flight write.

        Acquires then immediately releases the write lock, so the read
        happens only after any concurrent __set_data finishes.
        """
        self.__write_lock.acquire(*_args_for_lock(blocking, timeout))
        try:
            self.__write_lock.release()
        except RuntimeError:
            # Non-blocking acquire failed; lock is held elsewhere.
            pass
        return self.__data

    def __set_data(self, new_data, blocking=True, timeout=0.0):
        # Write under the lock so concurrent get_data() calls observe
        # either the old or the new value, never a half-finished state.
        self.__write_lock.acquire(*_args_for_lock(blocking, timeout))
        self.__data = new_data
        try:
            self.__write_lock.release()
        except RuntimeError:
            pass
| [
"josh.miklos@gmail.com"
] | josh.miklos@gmail.com |
412d4c1d26dc9b4425b7ac7a327faca556197f81 | 61587071621eefd6dd0f549d618753d769eededc | /src/pages/alagoas/novoextra.py | e16386b7609ff56c1b43bfa185920200c19dd23d | [] | no_license | diegothuran/rss3 | df1e9e07b0552eb8bdbe6131235df3b991ddbabc | 3177cf768047ced21e38a060d2306cd028e752fb | refs/heads/master | 2022-12-07T19:53:14.609867 | 2019-02-10T20:15:13 | 2019-02-10T20:15:13 | 145,567,411 | 0 | 0 | null | 2022-11-22T02:50:19 | 2018-08-21T13:25:00 | Python | UTF-8 | Python | false | false | 1,117 | py | # coding: utf-8
import sys
sys.path.insert(0, '../../src')
from bs4 import BeautifulSoup
import requests
# Site traffic ranks (presumably Alexa-style, captured when this scraper was
# written — TODO confirm source) and the canonical site name.
GLOBAL_RANK = 590712
RANK_BRAZIL = 32566
NAME = 'novoextra.com.br'
def get_urls():
    """Collect article URLs from the novoextra.com.br section pages.

    Fetches each hard-coded section listing, extracts the first link of
    every ``div.item`` teaser, and returns the absolute URLs.

    :return: list of absolute article URLs.
    :raises Exception: if any section page cannot be fetched or parsed;
        the original error is attached as ``__cause__``.
    """
    try:
        urls = []
        root = 'https://novoextra.com.br'
        links = [
            'https://novoextra.com.br/so-no-site/alagoas',
            'https://novoextra.com.br/so-no-site/geral',
            'https://novoextra.com.br/so-no-site/internacional',
            'https://novoextra.com.br/so-no-site/nacional',
            'https://novoextra.com.br/so-no-site/politica'
        ]
        for link in links:
            req = requests.get(link)
            noticias = BeautifulSoup(req.text, "html.parser").find_all('div', class_='item')
            for noticia in noticias:
                # Teaser hrefs are site-relative; prepend the site root.
                href = noticia.find_all('a', href=True)[0]['href']
                urls.append(root + href)
        return urls
    except Exception as exc:
        # BUGFIX: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt and discarded the original traceback; catch
        # Exception only and chain the cause for debuggability.
        raise Exception('Exception in novoextra') from exc
| [
"victor.lorena@gmail.com"
] | victor.lorena@gmail.com |
cc308036cdbc4f75519110ea6b6bf1c925d798d2 | 101f209bae06b98f7d0b949c19b02341f0afc280 | /01_HelloWorld/04_django/03_MODEL_PROJECT/real_data/apps.py | 3753550927c54e86a2cc038b3a11bd9c637f445e | [] | no_license | JJayeee/TIL | 71162040e86169d946e1812ef48d7a6faa9ffa5d | 86d954d36ed604f3ee044618421a8a2560fa8d35 | refs/heads/master | 2020-12-20T16:09:00.378294 | 2020-10-11T03:32:54 | 2020-10-11T03:32:54 | 195,916,223 | 2 | 0 | null | 2019-12-09T01:50:04 | 2019-07-09T02:17:39 | Python | UTF-8 | Python | false | false | 92 | py | from django.apps import AppConfig
class RealDataConfig(AppConfig):
    """Django application configuration for the ``real_data`` app."""

    # Dotted-path label Django uses to register the application.
    name = 'real_data'
| [
"jay.hyundong@gmail.com"
] | jay.hyundong@gmail.com |
ec5cda2e94a7432262ed358abe6b66255554e044 | 825930f372fdf8c9c42cd2f9b1f424ab9de90b38 | /accounts/models.py | 77d49558c6ab9090318df7e4d9bcc8553e09e68f | [] | no_license | Xasanjon/crm2 | 56cbfa05d910144c75a3cdfe7423ba68fd576534 | 52279925e64e4268830fbeae6af897aef14b64d0 | refs/heads/master | 2023-07-02T04:13:33.928305 | 2021-08-16T14:53:43 | 2021-08-16T14:53:43 | 395,755,429 | 0 | 0 | null | 2021-08-16T14:53:44 | 2021-08-13T18:30:32 | Python | UTF-8 | Python | false | false | 1,880 | py | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Customer(models.Model):
    """A shop customer, optionally linked one-to-one to a Django auth user."""

    # Optional link to the auth account owning this profile; deleting the
    # user cascades to the customer row.
    user = models.OneToOneField(
        User, null=True, blank=True, on_delete=models.CASCADE)
    name = models.CharField(max_length=200, null=True)
    phone = models.CharField(max_length=200, null=True, blank=True)
    email = models.CharField(max_length=200, null=True, blank=True)
    # Falls back to a bundled placeholder image when nothing is uploaded.
    profile_pic = models.ImageField(
        default="profile.png", null=True, blank=True)
    date_created = models.DateTimeField(auto_now_add=True, null=True)

    def __str__(self):
        # NOTE(review): name is nullable; returning None from __str__
        # raises TypeError under str() — confirm names are always set.
        return self.name
class Tag(models.Model):
    """Free-form label attached to products via a many-to-many relation."""

    name = models.CharField(max_length=200, null=True, blank=True)

    def __str__(self):
        # NOTE(review): name is nullable; returning None from __str__
        # raises TypeError under str() — confirm names are always set.
        return self.name
class Product(models.Model):
    """A sellable item with a price, category and optional tags."""

    # Closed set of category values; first element is stored in the DB.
    CATEGORY = (
        ('Indoor', 'Indoor'),
        ('Outdoor', 'Outdoor')
    )
    name = models.CharField(max_length=200, null=True)
    price = models.FloatField(null=True)
    category = models.CharField(max_length=200, null=True, choices=CATEGORY)
    description = models.TextField(null=True, blank=True)
    date_created = models.DateTimeField(auto_now_add=True, null=True)
    # Many-to-many: a product may carry any number of tags.
    tag = models.ManyToManyField(Tag)

    def __str__(self):
        # NOTE(review): name is nullable; returning None from __str__
        # raises TypeError under str() — confirm names are always set.
        return self.name
class Order(models.Model):
    """A customer's order for a single product, tracked through delivery."""

    # Closed set of fulfilment states; first element is stored in the DB.
    # NOTE: the 'Deleivered' typo is preserved deliberately — existing rows
    # reference this stored value.
    STATUS = (
        ('Pending', 'Pending'),
        ('Out for delivery', 'Out for delivery'),
        ('Deleivered', 'Deleivered')
    )
    # Both relations survive deletion of their target by nulling out.
    customer = models.ForeignKey(
        Customer, null=True, on_delete=models.SET_NULL)
    product = models.ForeignKey(Product, null=True, on_delete=models.SET_NULL)
    date_created = models.DateTimeField(auto_now_add=True, null=True)
    status = models.CharField(max_length=200, null=True, choices=STATUS)
    note = models.CharField(max_length=200, null=True, blank=True)

    def __str__(self):
        # BUGFIX: product is nullable (SET_NULL on delete), so
        # self.product.name raised AttributeError for orphaned orders.
        if self.product is None:
            return 'Order #{}'.format(self.pk)
        return self.product.name
| [
"xasanboy99akaxon@gmail.com"
] | xasanboy99akaxon@gmail.com |
6c44855d61e430162cf4edf26b96aea7b0cc08be | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_3814.py | d5ee62c94558f8c738e33b38d7038e454fd21acc | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 94 | py | # PyNotify not working from cron?
00 * * * * myname env DISPLAY=:0 python ~/scripts/script.py
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
f3905abd94b637d98fa7979ff6d9141620545f55 | 0f79fd61dc47fcafe22f83151c4cf5f2f013a992 | /BOJ/11053.py | c97d0ad92ba2aa4c8f3ec542dfb9fe9e3fe02f62 | [] | no_license | sangm1n/problem-solving | 670e119f28b0f0e293dbc98fc8a1aea74ea465ab | bc03f8ea9a6a4af5d58f8c45c41e9f6923f55c62 | refs/heads/master | 2023-04-22T17:56:21.967766 | 2021-05-05T12:34:01 | 2021-05-05T12:34:01 | 282,863,638 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | """
author : Lee Sang Min
github : https://github.com/sangm1n
e-mail : dltkd96als@naver.com
title : 가장 긴 증가하는 부분 수열
description : Dynamic Programming
"""
N = int(input())
arr = list(map(int, input().split()))
dp = [1] * N
for i in range(N):
for j in range(i+1, N):
if arr[i] < arr[j]:
dp[j] = max(dp[j], dp[i] + 1)
print(max(dp))
| [
"dltkd96als@naver.com"
] | dltkd96als@naver.com |
715e9f56ac3e6ff7f765b338d1357f4f59ecd8f2 | 51a902289b357ad6cf467e51c9740fa2a07c1c1c | /first_sem/lab_5/protect5.py | 4a02342594f304c825d684e7cf4e593a92379b99 | [] | no_license | ivaaahn/bmstu-python | d3910356353b0ab3faab243a53a1d657b7d793ad | 9f2d8eb390d804ad86545c992add2a6d97148a5d | refs/heads/main | 2023-03-24T08:52:12.723117 | 2021-03-16T13:19:46 | 2021-03-16T13:19:46 | 348,353,873 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 446 | py | x, eps = map(float, (input('Через пробел введите x, eps: ')).split(' '))
# S = 0.0
# n = 1
# elNext = x
# k = n + 1
#
# while abs(elNext) > eps:
# S += elNext
# n += 1
# elNext = -elNext * x * x / k / (k + 1)
# k = k + 2
s = 0.0
n = 2
nextEl = x
while abs(nextEl) >= eps:
s += nextEl
nextEl *= - x * x * (n-1) / (n * (n + 1) * (n + 1))
n += 2
print('S = {:.5}'.format(s))
| [
"ivahnencko01@gmail.com"
] | ivahnencko01@gmail.com |
9654adeb6cb6a9aed8c3e0304e89ee23367bafbe | 903a5afa943971c2be30007f0445264d417727e8 | /src/openprocurement/tender/openua/views/complaint_post_document.py | e7012422a55ba07117b724366dade2076a325b34 | [
"Apache-2.0"
] | permissive | pontostroy/api | 39ca2ff864daa9c6b8a96b17c49f6eec2f489398 | 5afdd3a62a8e562cf77e2d963d88f1a26613d16a | refs/heads/master | 2021-03-14T13:11:24.314136 | 2020-06-24T08:34:20 | 2020-06-24T08:34:20 | 246,768,251 | 0 | 0 | Apache-2.0 | 2020-06-24T07:18:10 | 2020-03-12T07:18:07 | Python | UTF-8 | Python | false | false | 5,210 | py | # -*- coding: utf-8 -*-
from openprocurement.api.utils import (
get_file,
upload_file,
update_file_content_type,
json_view,
context_unpack,
APIResource,
)
from openprocurement.api.validation import (
validate_file_update,
validate_file_upload,
validate_patch_document_data,
)
from openprocurement.tender.core.validation import (
validate_complaint_document_update_not_by_author,
)
from openprocurement.tender.core.utils import save_tender, optendersresource, apply_patch
from openprocurement.tender.openua.validation import (
validate_complaint_post_review_date,
validate_complaint_post_complaint_status,
validate_complaint_post_document_upload_by_author,
)
@optendersresource(
    name="aboveThresholdUA:Tender Complaint Post Documents",
    collection_path="/tenders/{tender_id}/complaints/{complaint_id}/posts/{post_id}/documents",
    path="/tenders/{tender_id}/complaints/{complaint_id}/posts/{post_id}/documents/{document_id}",
    procurementMethodType="aboveThresholdUA",
    description="Tender complaint post documents",
)
class TenderComplaintPostDocumentResource(APIResource):
    """REST resource for documents attached to a tender-complaint post.

    Upload/update operations are restricted to the post's author and are
    gated on complaint status and the review date (see the validator
    tuples on each view).
    """

    @json_view(permission="view_tender")
    def collection_get(self):
        """Tender Complaint Post Documents List.

        With ``?all=`` every stored document version is returned; otherwise
        only the latest version per document id, sorted by dateModified.
        """
        if self.request.params.get("all", ""):
            collection_data = [i.serialize("view") for i in self.context.documents]
        else:
            # Later list entries overwrite earlier ones in the dict, so only
            # the newest version of each document id survives.
            collection_data = sorted(
                dict([(i.id, i.serialize("view")) for i in self.context.documents]).values(),
                key=lambda i: i["dateModified"],
            )
        return {"data": collection_data}

    @json_view(
        validators=(
            validate_file_upload,
            validate_complaint_post_document_upload_by_author,
            validate_complaint_post_complaint_status,
            validate_complaint_post_review_date,
        ),
        permission="edit_complaint",
    )
    def collection_post(self):
        """Tender Complaint Post Document Upload."""
        document = upload_file(self.request)
        document.author = self.request.authenticated_role
        self.context.documents.append(document)
        # NOTE(review): when save_tender() returns falsy, errors were
        # presumably recorded on the request and None is returned here —
        # confirm the framework renders that as the error response.
        if save_tender(self.request):
            self.LOGGER.info(
                "Created tender complaint post document {}".format(document.id),
                extra=context_unpack(
                    self.request, {"MESSAGE_ID": "tender_complaint_post_document_create"},
                    {"document_id": document.id}
                ),
            )
            self.request.response.status = 201
            # Point Location at the item route of this collection route.
            document_route = self.request.matched_route.name.replace("collection_", "")
            self.request.response.headers["Location"] = self.request.current_route_url(
                _route_name=document_route, document_id=document.id, _query={}
            )
            return {"data": document.serialize("view")}

    @json_view(permission="view_tender")
    def get(self):
        """Tender Complaint Post Document Read.

        ``?download=`` streams the file contents; otherwise returns the
        document metadata plus its previous versions.
        """
        if self.request.params.get("download"):
            return get_file(self.request)
        document = self.request.validated["document"]
        document_data = document.serialize("view")
        # Everything sharing the document id but not the current url is an
        # older version.
        document_data["previousVersions"] = [
            i.serialize("view") for i in self.request.validated["documents"] if i.url != document.url
        ]
        return {"data": document_data}

    @json_view(
        validators=(
            validate_file_update,
            validate_complaint_document_update_not_by_author,
            validate_complaint_post_complaint_status,
            validate_complaint_post_review_date,
        ),
        permission="edit_complaint",
    )
    def put(self):
        """Tender Complaint Post Document Update (new file version)."""
        document = upload_file(self.request)
        document.author = self.request.authenticated_role
        # PUT appends a fresh version; older versions remain retrievable.
        self.request.validated["post"].documents.append(document)
        if save_tender(self.request):
            self.LOGGER.info(
                "Updated tender complaint document {}".format(self.request.context.id),
                extra=context_unpack(self.request, {"MESSAGE_ID": "tender_complaint_post_document_put"}),
            )
            return {"data": document.serialize("view")}

    @json_view(
        content_type="application/json",
        validators=(
            validate_patch_document_data,
            validate_complaint_document_update_not_by_author,
            validate_complaint_post_complaint_status,
            validate_complaint_post_review_date,
        ),
        permission="edit_complaint",
    )
    def patch(self):
        """Tender Complaint Post Document Update (metadata only)."""
        # apply_patch persists the change; returns falsy when nothing changed.
        if apply_patch(self.request, src=self.request.context.serialize()):
            update_file_content_type(self.request)
            self.LOGGER.info(
                "Updated tender complaint document {}".format(self.request.context.id),
                extra=context_unpack(self.request, {"MESSAGE_ID": "tender_complaint_post_document_patch"}),
            )
            return {"data": self.request.context.serialize("view")}
| [
"smithumble@gmail.com"
] | smithumble@gmail.com |
ccd648d652a9be83c5c2a3cf25506b3328aa808b | dd80a584130ef1a0333429ba76c1cee0eb40df73 | /external/chromium_org/third_party/WebKit/Tools/Scripts/webkitpy/common/system/systemhost.py | 52b88d6fcc1d525a4ac3586c4a0e186a9942bd2a | [
"MIT",
"BSD-3-Clause",
"GPL-1.0-or-later",
"LGPL-2.0-or-later",
"Apache-2.0"
] | permissive | karunmatharu/Android-4.4-Pay-by-Data | 466f4e169ede13c5835424c78e8c30ce58f885c1 | fcb778e92d4aad525ef7a995660580f948d40bc9 | refs/heads/master | 2021-03-24T13:33:01.721868 | 2017-02-18T17:48:49 | 2017-02-18T17:48:49 | 81,847,777 | 0 | 2 | MIT | 2020-03-09T00:02:12 | 2017-02-13T16:47:00 | null | UTF-8 | Python | false | false | 2,434 | py | # Copyright (c) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import platform
import sys
from webkitpy.common.system import environment, executive, filesystem, platforminfo, user, workspace
class SystemHost(object):
    """Bundle of the real (non-mock) system services — process execution,
    filesystem access, user interaction, platform queries and workspace
    handling — so tests can substitute a mock implementation wholesale."""

    def __init__(self):
        self.executive = executive.Executive()
        self.filesystem = filesystem.FileSystem()
        self.user = user.User()
        # PlatformInfo and Workspace are built on top of the services above.
        self.platform = platforminfo.PlatformInfo(sys, platform, self.executive)
        self.workspace = workspace.Workspace(self.filesystem, self.executive)

    def copy_current_environment(self):
        """Return an Environment wrapping a snapshot of os.environ."""
        return environment.Environment(dict(os.environ))

    def print_(self, *args, **kwargs):
        """print()-style helper: joins *args* with ``sep``, appends ``end``,
        and writes to ``file`` (default stdout, or stderr when
        ``stderr=True``)."""
        separator = kwargs.get('sep', ' ')
        terminator = kwargs.get('end', '\n')
        use_stderr = kwargs.get('stderr', False)
        stream = kwargs.get('file', None) or (sys.stderr if use_stderr else sys.stdout)
        text = separator.join(str(arg) for arg in args)
        stream.write(text + terminator)
| [
"karun.matharu@gmail.com"
] | karun.matharu@gmail.com |
1d215416e8655f9b2cca93de4850235b5d6baf1e | 78f0234c1f20605008a98aa9956a6071ccb7996f | /gag.py | f0fb681ef919f103cd7e3f5d6e36be0b7a257995 | [] | no_license | sashgorokhov-heaven/python-9gag-gui-old | b6ce8ee683323a7712d48a3c0f948703901245b2 | 870b803462180fcfaeb39c51869ebf279cb8a0d6 | refs/heads/master | 2021-05-27T23:08:47.641736 | 2014-05-23T19:22:33 | 2014-05-23T19:22:33 | 18,153,965 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 907 | py | __author__ = 'Alexander'
from libs import constants, util
from libs.gorokhovlibs.vk import accesstokener
try:
import requests
except ImportError:
print("Module 'requests' is not found")
exit(-1)
try:
import PyQt4
except ImportError:
print("Module 'PyQt4' is not found")
exit(-1)
access_token = user_id = None
if not accesstokener.good():
from libs.gorokhovlibs.vk.qt.auth import show_browser
access_token, user_id, expires = show_browser(constants.application_id, constants.permissions_scope)
if not access_token:
exit(-1)
accesstokener.new(access_token, user_id, expires)
else:
access_token, user_id, expires = accesstokener.get()
if not access_token:
util.showmessage("Didn't get secret key!")
exit(-1)
from forms.main_form import MainForm
app = PyQt4.QtGui.QApplication([])
mainform = MainForm(access_token)
mainform.show()
app.exec_()
| [
"sashgorokhov@gmail.com"
] | sashgorokhov@gmail.com |
315cdd5b8427d37fcd29c953161c986ad08c4fc7 | e9536f9dc5d0ed823f36e1f68ee10ccd01ecb250 | /aliyun-python-sdk-cdn/aliyunsdkcdn/request/v20141111/DescribeLiveStreamNumberListRequest.py | 2f11b8e61cfade747f0c2af3e0c8dcd204323d47 | [
"Apache-2.0"
] | permissive | Linyee/aliyun-openapi-python-sdk | 70aee05946d2cb0520d378f7aff6a1f7e8177d6d | 436f6251ec32fdccd58c940f2c508a95deeacd8b | refs/heads/master | 2021-01-20T03:54:40.275523 | 2017-08-25T02:15:01 | 2017-08-25T02:15:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,007 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DescribeLiveStreamNumberListRequest(RpcRequest):
    """RPC request for CDN ``DescribeLiveStreamNumberList`` (API 2014-11-11).

    Generated accessor pattern: each query parameter exposes a get_/set_
    pair backed by the request's query-parameter dict.  Getters return
    None while the parameter is unset.
    """

    def __init__(self):
        RpcRequest.__init__(self, 'Cdn', '2014-11-11', 'DescribeLiveStreamNumberList')

    def get_AppName(self):
        return self.get_query_params().get('AppName')

    def set_AppName(self,AppName):
        self.add_query_param('AppName',AppName)

    def get_SecurityToken(self):
        return self.get_query_params().get('SecurityToken')

    def set_SecurityToken(self,SecurityToken):
        self.add_query_param('SecurityToken',SecurityToken)

    def get_DomainName(self):
        return self.get_query_params().get('DomainName')

    def set_DomainName(self,DomainName):
        self.add_query_param('DomainName',DomainName)

    def get_EndTime(self):
        return self.get_query_params().get('EndTime')

    def set_EndTime(self,EndTime):
        self.add_query_param('EndTime',EndTime)

    def get_StartTime(self):
        return self.get_query_params().get('StartTime')

    def set_StartTime(self,StartTime):
        self.add_query_param('StartTime',StartTime)

    def get_OwnerId(self):
        return self.get_query_params().get('OwnerId')

    def set_OwnerId(self,OwnerId):
        self.add_query_param('OwnerId',OwnerId)
"haowei.yao@alibaba-inc.com"
] | haowei.yao@alibaba-inc.com |
5c4f25e2db0cbf657ab5255b37da00a033676c9b | bbf7787d94e97d4e0c9bceb46203c08939e6e67d | /django-python/web-app/MyWeb3/urls.py | c909d76cf412c4a39ced6bfb2d1ebb4317a1dfee | [] | no_license | llanoxdewa/python | 076e6fa3ed2128c21cdd26c1be6bc82ee6917f9c | 6586170c5f48827a5e1bcb35656870b5e4eed732 | refs/heads/main | 2023-06-16T05:31:52.494796 | 2021-07-09T09:04:30 | 2021-07-09T09:04:30 | 362,782,196 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 882 | py | """MyWeb3 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
from . import views
# URL routing table: admin site, project home page, and the per-app route
# modules for the blog and about sections.
# NOTE(review): no route has a `name=`, so reverse()/{% url %} cannot
# target these paths — confirm that is acceptable.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('',views.home),
    path('blog/',include('blog.urls')),
    path('about/',include('about.urls'))
]
| [
"llanoxdew4@gmail.com"
] | llanoxdew4@gmail.com |
1547accb85364d41897ae52576bc4b7385b919f5 | f030f25989ca369e32e1031915207c808975032d | /line_prob copy.py | 348bccffb0bbb6746c995d74c9c50e3cf471dac1 | [] | no_license | vivek3141/RandomProblems | 68708eecdf6fdc035f54259611b25c9c040518c3 | 6fa699c3d219c1d392474ce896d8794108dcc8e6 | refs/heads/master | 2021-05-12T17:36:39.399410 | 2020-03-11T07:15:01 | 2020-03-11T07:15:01 | 117,049,166 | 8 | 0 | null | null | null | null | UTF-8 | Python | false | false | 201 | py | print(f"Probability: {sum([1 if (lambda a:lambda c, num:a(a, c, num))(lambda s, c, num: c if num==7 else s(s, (B - c) * random.random() + c, num + 1))(A, 0) >= C else 0 for i in range(MAXN)]) / MAXN}") | [
"vivnps.verma@gmail.com"
] | vivnps.verma@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.