blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e08ed72e686789ad0ea484cf266e08a34d771429 | b33f1afe9f30c99f83ce2fe7ec1556b6dad8e0a6 | /03_roc.py | b090cd08264f082a535380984a2f8dfe4ce96087 | [] | no_license | Digital-Biobank/covid_variant_severity | e93845045adfc580c2cebbe6ecc5ee03aa02e9ba | cc0449a2429140352a1d6b97083321ae2002581f | refs/heads/master | 2023-04-01T16:09:00.206137 | 2021-04-26T19:09:12 | 2021-04-26T19:09:12 | 361,855,017 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,260 | py | import joblib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn.metrics import plot_roc_curve
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import auc, roc_curve, roc_auc_score
# %% Plot ROC curves
# Load the outcome frames (real and label-randomized) plus the fitted models.
df = pd.read_parquet("03_77142-vcf_2-component-pca_3-cluster-kmeans_outcomes_dropna.pickle")
df_random = pd.read_parquet("03_77142-vcf_2-component-pca_3-cluster-kmeans_outcomes_dropna_random.pickle")
lr_master = joblib.load("03_77142-vcf_2-component-pca_3-cluster-kmeans_logistic-regression-model.pickle")
lr_random = joblib.load("03_77142-vcf_2-component-pca_3-cluster-kmeans_logistic-regression-model_random.pickle")
logreg = joblib.load("models/02_77142-vcf_sklearn-logistic-regression-model.pickle")
# BUG FIX: X and y were previously referenced here before ever being assigned,
# which raised NameError at runtime.  Derive them from the loaded frame first.
# NOTE(review): assumes `logreg` was fit on these same feature columns — confirm.
X = df.drop("is_red", axis=1)
y = df["is_red"]
plot_roc_curve(logreg, X=X, y=y)
plt.show()
cv = StratifiedKFold(n_splits=5)
# Parallel sequences consumed by the plotting loops below.
clfs = lr_master, lr_random
dfs = df, df_random
labs = ["Logistic Regression", "Logistic Regression (random)"]
# Overlay ROC curves for the fitted vs. label-randomized logistic regressions.
for model, frame, label in zip(clfs, dfs, labs):
    features = frame.drop("is_red", axis=1)
    target = frame["is_red"]
    scores = model.predict_proba(features)[:, 1]
    fpr, tpr, _ = roc_curve(target, scores)
    plt.plot(fpr, tpr, label=f"{label}, AUC={roc_auc_score(target, scores):.3f}")
plt.legend(loc=4)
# Diagonal reference line: performance of a random classifier.
plt.plot([0, 1], [0, 1], linestyle='--', lw=2, color='r', label='Chance', alpha=.8)
plt.savefig(
    "02_77142-vcf_2-component-pca-transformed_"
    "mortality_3-cluster-kmeans_"
    "logistic-regression_roc-curve.png"
)
plt.show()
# Repeat the comparison for the k-nearest-neighbors models.
knn_master = joblib.load("03_77142-vcf_2-component-pca_3-cluster-kmeans_knn.pickle")
knn_random = joblib.load("03_77142-vcf_2-component-pca_3-cluster-kmeans_knn_random.pickle")
clfs = [knn_master, knn_random]
labs = ["K nearest neighbors", "K nearest neighbors (random)"]
for model, frame, label in zip(clfs, dfs, labs):
    features = frame.drop("is_red", axis=1)
    target = frame["is_red"]
    scores = model.predict_proba(features)[:, 1]
    fpr, tpr, _ = roc_curve(target, scores)
    plt.plot(fpr, tpr, label=f"{label}, AUC={roc_auc_score(target, scores):.3f}")
plt.legend(loc=4)
plt.plot([0, 1], [0, 1], linestyle='--', lw=2, color='r', label='Chance', alpha=.8)
plt.xlabel("False positive rate")
plt.ylabel("True positive rate")
plt.savefig(
    "02_77142-vcf_2-component-pca-transformed_"
    "mortality_3-cluster-kmeans_"
    "knn_roc-curve.png"
)
plt.show()
# Repeat the comparison for the decision-tree models.
dt_master = joblib.load("03_77142-vcf_2-component-pca_3-cluster-kmeans_dt.pickle")
dt_random = joblib.load("03_77142-vcf_2-component-pca_3-cluster-kmeans_dt_random.pickle")
clfs = [dt_master, dt_random]
labs = ["Decision Tree", "Decision Tree (random)"]
for model, frame, label in zip(clfs, dfs, labs):
    features = frame.drop("is_red", axis=1)
    target = frame["is_red"]
    scores = model.predict_proba(features)[:, 1]
    fpr, tpr, _ = roc_curve(target, scores)
    plt.plot(fpr, tpr, label=f"{label}, AUC={roc_auc_score(target, scores):.3f}")
plt.legend(loc=4)
plt.plot([0, 1], [0, 1], linestyle='--', lw=2, color='r', label='chance', alpha=.8)
plt.xlabel("False positive rate")
plt.ylabel("True positive rate")
plt.savefig(
    "02_77142-vcf_2-component-pca-transformed_"
    "mortality_3-cluster-kmeans_"
    "decision-tree_roc-curve.png"
)
plt.show()
# Repeat the comparison for the random-forest models.
rf_master = joblib.load("03_77142-vcf_2-component-pca_3-cluster-kmeans_rf.pickle")
rf_random = joblib.load("03_77142-vcf_2-component-pca_3-cluster-kmeans_rf_random.pickle")
clfs = [rf_master, rf_random]
labs = ["Random Forest", "Random Forest (random)"]
for model, frame, label in zip(clfs, dfs, labs):
    features = frame.drop("is_red", axis=1)
    target = frame["is_red"]
    scores = model.predict_proba(features)[:, 1]
    fpr, tpr, _ = roc_curve(target, scores)
    plt.plot(fpr, tpr, label=f"{label}, AUC={roc_auc_score(target, scores):.3f}")
plt.legend(loc=4)
plt.plot([0, 1], [0, 1], linestyle='--', lw=2, color='r', label='chance', alpha=.8)
plt.xlabel("False positive rate")
plt.ylabel("True positive rate")
plt.savefig(
    "02_77142-vcf_2-component-pca-transformed_"
    "mortality_3-cluster-kmeans_"
    "random-forest_roc-curve.png"
)
plt.show()
# classifier = dt_master
#
# tprs = []
# aucs = []
# mean_fpr = np.linspace(0, 1, 100)
#
# fig, ax = plt.subplots()
# for i, (train, test) in enumerate(cv.split(X, y)):
# classifier.fit(X[train], y[train])
# viz = plot_roc_curve(classifier, X[test], y[test],
# name='ROC fold {}'.format(i),
# alpha=0.3, lw=1, ax=ax)
# interp_tpr = np.interp(mean_fpr, viz.fpr, viz.tpr)
# interp_tpr[0] = 0.0
# tprs.append(interp_tpr)
# aucs.append(viz.roc_auc)
#
# ax.plot([0, 1], [0, 1], linestyle='--', lw=2, color='r',
# label='Chance', alpha=.8)
#
# mean_tpr = np.mean(tprs, axis=0)
# mean_tpr[-1] = 1.0
# mean_auc = auc(mean_fpr, mean_tpr)
# std_auc = np.std(aucs)
# ax.plot(mean_fpr, mean_tpr, color='b',
# label=r'Mean ROC (AUC = %0.2f $\pm$ %0.2f)' % (mean_auc, std_auc),
# lw=2, alpha=.8)
#
# std_tpr = np.std(tprs, axis=0)
# tprs_upper = np.minimum(mean_tpr + std_tpr, 1)
# tprs_lower = np.maximum(mean_tpr - std_tpr, 0)
# ax.fill_between(mean_fpr, tprs_lower, tprs_upper, color='grey', alpha=.2,
# label=r'$\pm$ 1 std. dev.')
#
# ax.set(xlim=[-0.05, 1.05], ylim=[-0.05, 1.05],
# title="Receiver operating characteristic example")
# ax.legend(loc="lower right")
# plt.show() | [
"marskar@gmail.com"
] | marskar@gmail.com |
4ddd2ce968f291b1087beb17871dd31a8596a4af | cf478990eb71b34c26a4094c8fe463e4551e5da0 | /app/mtgatracker_backend.py | 5392972ab2b5d0b84d5280681f9e12e5e714af46 | [
"MIT"
] | permissive | balika011/mtgatracker | 575f718ad0ec1f4b59f20291993b375a84c1651c | 5721e9b50b6ec7b21e28365dc0de8ee647dff94b | refs/heads/master | 2020-05-27T12:34:57.405589 | 2019-05-25T23:18:24 | 2019-05-25T23:18:24 | 188,620,593 | 0 | 0 | NOASSERTION | 2019-05-25T23:15:02 | 2019-05-25T23:15:01 | null | UTF-8 | Python | false | false | 9,224 | py | import sys
import os
path_to_root = os.path.abspath(os.path.join(__file__, "..", ".."))
sys.path.append(path_to_root)
import threading
import argparse
from app import tasks, queues
from util import KillableTailer
from queue import Empty
import asyncio
import datetime
import json
import websockets
import time
from pynput import mouse
from app.queues import all_die_queue, game_state_change_queue, general_output_queue, decklist_change_queue
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-i', '--log_file', default=None)
arg_parser.add_argument('-nf', '--no_follow', action="store_true", default=False)
arg_parser.add_argument('-f', '--read_full_log', action="store_true", default=False)
arg_parser.add_argument('-m', '--mouse_events', action="store_true", default=False)
arg_parser.add_argument('-p', '--port', default=8089, type=int)
args = arg_parser.parse_args()
print("process started with args: {}".format(args))
async def stats(websocket):
    """Forward one pending game-state change to the client, if any is queued."""
    try:
        state = game_state_change_queue.get(timeout=0.01)
    except Empty:
        state = False
    if state:
        # Stamp with a UTC ISO-8601 timestamp and tag the payload type.
        state["now"] = datetime.datetime.utcnow().isoformat() + 'Z'
        state["data_type"] = "game_state"
        await websocket.send(json.dumps(state))
    # Yield briefly so sibling tasks in handler() get scheduled.
    await asyncio.sleep(0.01)
async def decks(websocket):
    """Forward one pending decklist change to the client, if any is queued."""
    try:
        updated_decks = decklist_change_queue.get(timeout=0.01)
    except Empty:
        updated_decks = False
    if updated_decks:
        payload = {
            "decks": updated_decks,
            "now": datetime.datetime.utcnow().isoformat() + 'Z',
            "data_type": "decklist_change",
        }
        await websocket.send(json.dumps(payload))
    # Yield briefly so sibling tasks in handler() get scheduled.
    await asyncio.sleep(0.01)
async def output(websocket):
    """Forward one pending general message (or error) to the client, if any."""
    try:
        msg = general_output_queue.get(timeout=0.01)
    except Empty:
        msg = False
    if msg:
        msg['now'] = datetime.datetime.utcnow().isoformat() + 'Z'
        # Dicts carrying an "error" key are surfaced as errors; everything
        # else is a plain informational message.
        is_error = isinstance(msg, dict) and "error" in msg.keys()
        msg["data_type"] = "error" if is_error else "message"
        await websocket.send(json.dumps(msg))
    # Yield briefly so sibling tasks in handler() get scheduled.
    await asyncio.sleep(0.01)
async def consumer_handler(websocket):
    """Consume inbound client messages; "die" triggers a global shutdown."""
    async for inbound in websocket:
        if inbound == "die":
            all_die_queue.put("DIE")
        else:
            # Echo everything else back as an acknowledgement.
            print("ack {}".format(inbound))
            await websocket.send("ack {}".format(inbound))
async def handler(websocket, _):
    """Per-connection main loop: race consumer and producer tasks until shutdown.

    Each iteration spawns the consumer plus the three producers
    (stats/output/decks), waits for whichever finishes first, and cancels
    the rest.  Once anything lands on all_die_queue the loop exits, the
    socket is closed, and the event loop is stopped so the websocket
    thread can terminate.
    """
    while all_die_queue.empty():
        consumer_task = asyncio.ensure_future(consumer_handler(websocket))
        stats_task = asyncio.ensure_future(stats(websocket))
        output_task = asyncio.ensure_future(output(websocket))
        decks_task = asyncio.ensure_future(decks(websocket))
        done, pending = await asyncio.wait(
            [decks_task, consumer_task, stats_task, output_task],
            return_when=asyncio.FIRST_COMPLETED,
        )
        for task in pending:
            task.cancel()
        # BUG FIX: was time.sleep(0.1), which blocked the whole event loop
        # (freezing every connection) instead of yielding to it.
        await asyncio.sleep(0.1)
    # BUG FIX: websockets' close() is a coroutine; the un-awaited call
    # `websocket.close()` never actually ran, leaking the connection.
    await websocket.close()
    loop = asyncio.get_event_loop()
    loop.stop()
if args.log_file is None: # assume we're on windows for now # TODO
    # Default to MTGA's output_log.txt under %APPDATA%\..\LocalLow.
    # Leaves args.log_file as None when the file does not exist, which the
    # __main__ block below reports as a NoLogException to the client.
    appdata_roaming = os.getenv("APPDATA")
    wotc_locallow_path = os.path.join(appdata_roaming, "..", "LocalLow", "Wizards Of The Coast", "MTGA")
    output_log = os.path.join(wotc_locallow_path, "output_log.txt")
    if not os.path.exists(output_log):
        output_log = None
    args.log_file = output_log
def click_event(_x, _y, button, pressed):
    """pynput mouse callback: publish click events to the output queue.

    Returning False tells pynput to stop the listener, which we do once a
    shutdown request has landed on all_die_queue.
    """
    if pressed:
        if button == mouse.Button.right:
            general_output_queue.put({"right_click": True})
        elif button == mouse.Button.left:
            general_output_queue.put({"left_click": True})
    if not all_die_queue.empty():
        return False
def start_mouse_listener():
    """Run the pynput mouse listener until click_event requests a stop."""
    # Blocks this thread until click_event returns False (global shutdown).
    with mouse.Listener(on_click=click_event) as listener:
        listener.join()
if __name__ == "__main__":
    # Entry point: wire up the processing pipeline
    #   log lines -> block_read_queue -> json_blob_queue -> per-topic queues
    # then either replay the whole log and/or tail it live.
    print("starting websocket server with port {}".format(args.port))
    start_server = websockets.serve(handler, '127.0.0.1', args.port)
    asyncio.get_event_loop().run_until_complete(start_server)
    print("starting block watch task server")
    block_watch_process = threading.Thread(target=tasks.block_watch_task, args=(queues.block_read_queue, queues.json_blob_queue, ))
    block_watch_process.start()
    print("starting json watch task server")
    # NOTE(review): json_blob_queue is passed as both input and output here —
    # presumably the task re-queues partial blobs; confirm against tasks.py.
    json_watch_process = threading.Thread(target=tasks.json_blob_reader_task, args=(queues.json_blob_queue, queues.json_blob_queue, ))
    json_watch_process.start()
    current_block = ""
    print("starting websocket thread")
    websocket_thread = threading.Thread(target=asyncio.get_event_loop().run_forever)
    websocket_thread.start()
    print("starting mouse thread")
    mouse_thread = None
    if args.mouse_events:
        mouse_thread = threading.Thread(target=start_mouse_listener)
        mouse_thread.start()
    if args.read_full_log:
        print("WARNING: known issue with reading full log!")
        print("For some reason, reading the full log causes the python process to never exit.")
        print("It has something to do with putting data into the queue from this block (line marked), but other than")
        print("that I really can't figure it out. Anyways, you'll have to kill the python process manually.")
        with open(args.log_file, 'r') as rf:
            # NOTE(review): previous_block_end is never used below.
            previous_block_end = 0
            for idx, line in enumerate(rf):
                if line and (line.startswith("[UnityCrossThreadLogger]") or line.startswith("[Client GRE]")):
                    # this is the start of a new block (with title), end the last one
                    # print(current_block)
                    if "{" in current_block: # try to speed up debug runs by freeing up json watcher task
                        # which is likely the slowest
                        queues.block_read_queue.put((idx, current_block))
                    current_block = line.strip() + "\n"
                # NOTE(review): operator precedence parses this as
                # (line and startswith("]")) or startswith("}") — harmless
                # while `line` is always truthy here, but fragile.
                elif line and line.startswith("]") or line.startswith("}"):
                    current_block += line.strip() + "\n"
                    # this is the END of a block, end it and start a new one
                    if "{" in current_block: # try to speed up debug runs by freeing up json watcher task
                        # which is likely the slowest
                        queues.block_read_queue.put((idx, current_block))
                    current_block = ""
                else:
                    # we're in the middle of a block somewhere
                    stripped = line.strip()
                    if stripped:
                        current_block += stripped + "\n"
                if not all_die_queue.empty():
                    break
    # NOTE(review): count is assigned here but never used afterwards.
    count = 0
    if not args.no_follow and all_die_queue.empty():
        print("starting to tail file: {}".format(args.log_file))
        if args.log_file:
            with open(args.log_file) as log_file:
                # Tail the live log; KillableTailer exits when all_die_queue fills.
                # NOTE(review): this parse loop duplicates the replay loop above
                # (minus the idx tuple) — candidate for extraction to a helper.
                kt = KillableTailer(log_file, queues.all_die_queue)
                kt.seek_end()
                for line in kt.follow(1):
                    if line and (line.startswith("[UnityCrossThreadLogger]") or line.startswith("[Client GRE]")):
                        # this is the start of a new block (with title), end the last one
                        # print(current_block)
                        if "{" in current_block: # try to speed up debug runs by freeing up json watcher task
                            # which is likely the slowest
                            queues.block_read_queue.put(current_block)
                        current_block = line.strip() + "\n"
                    elif line and line.startswith("]") or line.startswith("}"):
                        current_block += line.strip() + "\n"
                        # this is the END of a block, end it and start a new one
                        if "{" in current_block: # try to speed up debug runs by freeing up json watcher task
                            # which is likely the slowest
                            queues.block_read_queue.put(current_block)
                        current_block = ""
                    else:
                        # we're in the middle of a block somewhere
                        stripped = line.strip()
                        if stripped:
                            current_block += stripped + "\n"
                    if not all_die_queue.empty():
                        break
        else:
            general_output_queue.put({"error": "NoLogException", "msg": "No log file present. Please run MTGA at least once before launching MTGA Tracker.", "count": 1})
    # Shutdown: a None sentinel tells block_watch_task to stop, then join
    # every worker and drain the remaining blobs so threads can exit.
    queues.block_read_queue.put(None)
    block_watch_process.join()
    json_watch_process.join()
    websocket_thread.join()
    start_server.ws_server.close()
    if mouse_thread:
        mouse_thread.join()
        print("mouse joined!")
    while queues.json_blob_queue.qsize():
        queues.json_blob_queue.get()
"hawkins.spencer@gmail.com"
] | hawkins.spencer@gmail.com |
809baad515b0e0b836349fe344facf3fa45083de | debea7714c997912089fde6a0971989f363c72e8 | /lista4/f.py | 1054d6968108dbc32d6d25460721e442edf9ad97 | [] | no_license | ezequiasOR/aa-iniciante | 42bc0f9f4df5bd9a68fcc1ba2d6558bcffff6c90 | 7a4e3882f74eb3941b3658e82abbbd9a3ecd3776 | refs/heads/master | 2023-01-29T04:28:55.678450 | 2020-12-08T23:57:17 | 2020-12-08T23:57:17 | 293,347,924 | 1 | 0 | null | 2020-10-30T15:08:53 | 2020-09-06T19:28:48 | Python | UTF-8 | Python | false | false | 194 | py | resp = [0]*100001
n = int(raw_input())
count = 1
# Sieve pass: whenever an unmarked i is reached it must be prime (no smaller
# prime divided it), so every multiple of i gets stamped with this prime's
# 1-based discovery index.  Larger primes overwrite earlier stamps, so
# resp[j] ends up holding the index of j's LARGEST prime factor.
# (resp is the zero-initialized table created above.)
for i in range(2, n+1):
    if resp[i] == 0:
        for j in range(i, n+1, i):
            resp[j] = count
        count += 1
# Python 2 trailing-comma print: space-separated values on a single line.
for i in range(2, n+1):
    print resp[i],
| [
"ezequias.rocha@ccc.ufcg.edu.br"
] | ezequias.rocha@ccc.ufcg.edu.br |
cc089c46a79ae07632a468a15cec36414e2c88f3 | 843e855309267575b9e6f43ffcbee68bf85bab37 | /swet/bloggerspot/views.py | 14219c68e79489d71ca00f10fdf49a4af1d2d026 | [] | no_license | Arghachatterjee11/blog1 | b3202cb00a5b1adf0d50919c230cf9cc2db4115c | 8a79714af7a68732d7463d917bc5f56de6029bc6 | refs/heads/main | 2023-07-15T06:09:28.820369 | 2021-08-31T15:33:19 | 2021-08-31T15:33:19 | 401,750,641 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 103 | py |
from django.shortcuts import render
def home(request):
    """Render the blog landing page (home.html) with an empty context."""
    return render(request, 'home.html',{})
| [
"you@example.com"
] | you@example.com |
bc1543ebdb3cc210318ada2fd80370218e3ac405 | f4cc5a888d0dd1a5975e0467e21da0bedf48c921 | /runtime/test_hq.py | 2e0d0ba8ede7cc804c35e035673420e2ea6254bf | [] | no_license | wenjiandu/QUANTAXISRUNTIME | 88dbca3e65ed2510e001392997b97577f8e1214c | 2cf28abe1f56d4219f1f89980d7b64460e65856c | refs/heads/master | 2020-04-10T04:01:04.097325 | 2017-11-21T04:45:24 | 2017-11-21T04:45:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | from hqservice.fetcher import quotation
import QUANTAXIS as QA
# Smoke test: repeatedly fetch live quotes for every stock code in the block
# list and print how many were returned each pass.
stock_list=QA.QA_fetch_stock_block_adv().code
for i in range(100):
    print(len(quotation(stock_list)))
"yutiansut@qq.com"
] | yutiansut@qq.com |
2df3e21491d807fa232e644142ee27c142df344e | f32421e59d1b42ff42ef56a529e365dd094160d9 | /configs/gcnet/nl_stage/mask_rcnn_nl_eg_c3_r50_fpn_1x.py | 86ab75cf3b4ce555de68d70c4531b16eeb29fbb8 | [
"Apache-2.0"
] | permissive | li-haoran/DNL-Object-Detection | 634d867c2c8126c333884de678c3d9c16a78a1ba | 6ae88842d6237a465559c420c610444bcb2d9405 | refs/heads/master | 2023-01-28T05:35:24.033494 | 2020-12-07T02:48:57 | 2020-12-07T02:48:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,880 | py | # model settings
# Mask R-CNN with ResNet-50 + FPN; a non-local block (embedded-Gaussian
# variant, channel reduction 4) is inserted into backbone stage c3
# (stage_with_nlb index 1, position -2).  1x (12-epoch) COCO schedule.
model = dict(
    type='MaskRCNN',
    pretrained='torchvision://resnet50',
    backbone=dict(
        type='ResNet',
        depth=50,
        num_stages=4,
        out_indices=(0, 1, 2, 3),
        frozen_stages=1,
        style='pytorch',
        nlb=dict(mode='embedded_gaussian',
                 reduction=4),
        stage_with_nlb=[[], [-2], [], []],
    ),
    neck=dict(
        type='FPN',
        in_channels=[256, 512, 1024, 2048],
        out_channels=256,
        num_outs=5),
    rpn_head=dict(
        type='RPNHead',
        in_channels=256,
        feat_channels=256,
        anchor_scales=[8],
        anchor_ratios=[0.5, 1.0, 2.0],
        anchor_strides=[4, 8, 16, 32, 64],
        target_means=[.0, .0, .0, .0],
        target_stds=[1.0, 1.0, 1.0, 1.0],
        loss_cls=dict(
            type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
        loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
    bbox_roi_extractor=dict(
        type='SingleRoIExtractor',
        roi_layer=dict(type='RoIAlign', out_size=7, sample_num=2),
        out_channels=256,
        featmap_strides=[4, 8, 16, 32]),
    bbox_head=dict(
        type='SharedFCBBoxHead',
        num_fcs=2,
        in_channels=256,
        fc_out_channels=1024,
        roi_feat_size=7,
        num_classes=81,
        target_means=[0., 0., 0., 0.],
        target_stds=[0.1, 0.1, 0.2, 0.2],
        reg_class_agnostic=False,
        loss_cls=dict(
            type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
        loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
    mask_roi_extractor=dict(
        type='SingleRoIExtractor',
        roi_layer=dict(type='RoIAlign', out_size=14, sample_num=2),
        out_channels=256,
        featmap_strides=[4, 8, 16, 32]),
    mask_head=dict(
        type='FCNMaskHead',
        num_convs=4,
        in_channels=256,
        conv_out_channels=256,
        num_classes=81,
        loss_mask=dict(
            type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)))
# model training and testing settings
train_cfg = dict(
    rpn=dict(
        assigner=dict(
            type='MaxIoUAssigner',
            pos_iou_thr=0.7,
            neg_iou_thr=0.3,
            min_pos_iou=0.3,
            ignore_iof_thr=-1),
        sampler=dict(
            type='RandomSampler',
            num=256,
            pos_fraction=0.5,
            neg_pos_ub=-1,
            add_gt_as_proposals=False),
        allowed_border=0,
        pos_weight=-1,
        debug=False),
    rpn_proposal=dict(
        nms_across_levels=False,
        nms_pre=2000,
        nms_post=2000,
        max_num=2000,
        nms_thr=0.7,
        min_bbox_size=0),
    rcnn=dict(
        assigner=dict(
            type='MaxIoUAssigner',
            pos_iou_thr=0.5,
            neg_iou_thr=0.5,
            min_pos_iou=0.5,
            ignore_iof_thr=-1),
        sampler=dict(
            type='RandomSampler',
            num=512,
            pos_fraction=0.25,
            neg_pos_ub=-1,
            add_gt_as_proposals=True),
        mask_size=28,
        pos_weight=-1,
        debug=False))
test_cfg = dict(
    rpn=dict(
        nms_across_levels=False,
        nms_pre=1000,
        nms_post=1000,
        max_num=1000,
        nms_thr=0.7,
        min_bbox_size=0),
    rcnn=dict(
        score_thr=0.05,
        nms=dict(type='nms', iou_thr=0.5),
        max_per_img=100,
        mask_thr_binary=0.5))
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
# ImageNet mean/std normalization (RGB order).
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
    dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
    dict(type='RandomFlip', flip_ratio=0.5),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='Pad', size_divisor=32),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
]
test_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(
        type='MultiScaleFlipAug',
        img_scale=(1333, 800),
        flip=False,
        transforms=[
            dict(type='Resize', keep_ratio=True),
            dict(type='RandomFlip'),
            dict(type='Normalize', **img_norm_cfg),
            dict(type='Pad', size_divisor=32),
            dict(type='ImageToTensor', keys=['img']),
            dict(type='Collect', keys=['img']),
        ])
]
data = dict(
    imgs_per_gpu=2,
    workers_per_gpu=2,
    train=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_train2017.json',
        img_prefix=data_root + 'train2017/',
        pipeline=train_pipeline),
    val=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        pipeline=test_pipeline),
    test=dict(
        type=dataset_type,
        ann_file=data_root + 'annotations/instances_val2017.json',
        img_prefix=data_root + 'val2017/',
        pipeline=test_pipeline))
# optimizer
# NOTE(review): lr=0.02 is the conventional 8-GPU x 2-imgs/GPU setting —
# scale linearly if the GPU count differs; confirm against the launch script.
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=1.0 / 3,
    step=[8, 11])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
    interval=50,
    hooks=[
        dict(type='TextLoggerHook'),
        # dict(type='TensorboardLoggerHook')
    ])
# yapf:enable
# runtime settings
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/mask_rcnn_nl_eg_c3_r50_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
"yaozhuliang13@gmail.com"
] | yaozhuliang13@gmail.com |
1017ee77d69ca5215e4188bc08bd8329ee38f4b8 | d7016f69993570a1c55974582cda899ff70907ec | /sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/aio/operations/_database_operations.py | 6cf8af613227dde0a71c55c7d088f7485c0db1f3 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 16,390 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._database_operations import (
build_list_metric_definitions_request,
build_list_metrics_request,
build_list_usages_request,
)
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DatabaseOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.cosmosdb.aio.CosmosDBManagementClient`'s
:attr:`database` attribute.
"""
models = _models
    def __init__(self, *args, **kwargs) -> None:
        # Wire up the shared client/config/serializer objects injected by the
        # parent CosmosDBManagementClient (positionally or by keyword); not
        # meant to be instantiated directly — see the class docstring.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list_metrics(
self, resource_group_name: str, account_name: str, database_rid: str, filter: str, **kwargs: Any
) -> AsyncIterable["_models.Metric"]:
"""Retrieves the metrics determined by the given filter for the given database account and
database.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param account_name: Cosmos DB database account name. Required.
:type account_name: str
:param database_rid: Cosmos DB database rid. Required.
:type database_rid: str
:param filter: An OData filter expression that describes a subset of metrics to return. The
parameters that can be filtered are name.value (name of the metric, can have an or of multiple
names), startTime, endTime, and timeGrain. The supported operator is eq. Required.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Metric or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.cosmosdb.models.Metric]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-08-15-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.MetricListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_metrics_request(
resource_group_name=resource_group_name,
account_name=account_name,
database_rid=database_rid,
subscription_id=self._config.subscription_id,
filter=filter,
api_version=api_version,
template_url=self.list_metrics.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("MetricListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list_metrics.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/databases/{databaseRid}/metrics"} # type: ignore
@distributed_trace
def list_usages(
self,
resource_group_name: str,
account_name: str,
database_rid: str,
filter: Optional[str] = None,
**kwargs: Any
) -> AsyncIterable["_models.Usage"]:
"""Retrieves the usages (most recent data) for the given database.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param account_name: Cosmos DB database account name. Required.
:type account_name: str
:param database_rid: Cosmos DB database rid. Required.
:type database_rid: str
:param filter: An OData filter expression that describes a subset of usages to return. The
supported parameter is name.value (name of the metric, can have an or of multiple names).
Default value is None.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either Usage or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.cosmosdb.models.Usage]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop(
"api_version", _params.pop("api-version", self._config.api_version)
) # type: Literal["2022-08-15-preview"]
cls = kwargs.pop("cls", None) # type: ClsType[_models.UsagesResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_usages_request(
resource_group_name=resource_group_name,
account_name=account_name,
database_rid=database_rid,
subscription_id=self._config.subscription_id,
filter=filter,
api_version=api_version,
template_url=self.list_usages.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("UsagesResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list_usages.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/databases/{databaseRid}/usages"} # type: ignore
    @distributed_trace
    def list_metric_definitions(
        self, resource_group_name: str, account_name: str, database_rid: str, **kwargs: Any
    ) -> AsyncIterable["_models.MetricDefinition"]:
        """Retrieves metric definitions for the given database.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param account_name: Cosmos DB database account name. Required.
        :type account_name: str
        :param database_rid: Cosmos DB database rid. Required.
        :type database_rid: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either MetricDefinition or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.cosmosdb.models.MetricDefinition]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        # Service API version; callers may override via kwargs or query params.
        api_version = kwargs.pop(
            "api_version", _params.pop("api-version", self._config.api_version)
        )  # type: Literal["2022-08-15-preview"]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.MetricDefinitionsListResult]

        # Map error status codes to the azure-core exceptions raised for them.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the request from the operation parameters.
            if not next_link:
                request = build_list_metric_definitions_request(
                    resource_group_name=resource_group_name,
                    account_name=account_name,
                    database_rid=database_rid,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list_metric_definitions.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand its elements to the pager.
            deserialized = self._deserialize("MetricDefinitionsListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch a single page; any non-200 status raises HttpResponseError.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)

    list_metric_definitions.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/databases/{databaseRid}/metricDefinitions"}  # type: ignore
| [
"noreply@github.com"
] | kurtzeborn.noreply@github.com |
6c08fe9ceaf7eedae3d6d1ff8b2e4e8906ba1ac7 | e8708b79f22859c2623ea59d4e32193270d9c760 | /Caminata/frames.py | aae8b1cb69f226b651364da4fb6d3678b69e47bb | [] | no_license | vinsmokemau/PDI | 23c927ae36e37a6296ef6f1eb5576e9a800b8e20 | cd08cd02fbd81fee82c85673257912fc87e457d7 | refs/heads/master | 2023-07-19T08:37:38.036967 | 2020-10-21T01:33:46 | 2020-10-21T01:33:46 | 176,957,931 | 0 | 0 | null | 2023-07-06T21:34:03 | 2019-03-21T14:09:17 | Python | UTF-8 | Python | false | false | 2,529 | py | """Extracting and Saving Video Frames using OpenCV-PythonPython."""
import cv2
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
def rgb2gray(image):
    """Transform a color image to a grayscale image.

    Uses the ITU-R 601 luma weights (0.299, 0.587, 0.114) applied to the
    first three channels of the last axis.
    """
    luma_weights = [0.299, 0.587, 0.114]
    rgb = image[..., :3]
    return np.dot(rgb, luma_weights)
# Open the video file, dump every frame to disk, and keep an in-memory
# grayscale copy of each frame.
cap = cv2.VideoCapture('caminata_lenta.mp4')
i = 0
frames = []
while cap.isOpened():
    ret, frame = cap.read()
    if ret is False:
        break
    # Zero-pad single-digit indices so 'frame05.jpg' sorts before 'frame10.jpg'.
    if len(str(i)) > 1:
        name = 'frame' + str(i) + '.jpg'
    else:
        name = 'frame0' + str(i) + '.jpg'
    cv2.imwrite(name, frame)
    frames.append(rgb2gray(mpimg.imread(name)))
    i += 1

cap.release()
cv2.destroyAllWindows()

# Grayscale reference patches for each colored marker; their min/max gray
# levels define the detection band for that marker.
blue_spot = rgb2gray(mpimg.imread('spot_blue.jpg'))
green_spot = rgb2gray(mpimg.imread('spot_green.jpg'))
red_spot = rgb2gray(mpimg.imread('spot_red.jpg'))

blue_spot_min = np.amin(blue_spot)
green_spot_min = np.amin(green_spot)
red_spot_min = np.amin(red_spot)

blue_spot_max = np.amax(blue_spot)
green_spot_max = np.amax(green_spot)
red_spot_max = np.amax(red_spot)

red_images = []
green_images = []
blue_images = []

for gray_frame in frames:
    # Binary masks (255 inside the band, 0 elsewhere).  The one-pixel border
    # is left at 0, exactly as the original range(1, y-1) pixel loop did;
    # vectorizing replaces the O(width*height) Python loop with numpy ops.
    red_image = np.zeros(gray_frame.shape)
    green_image = np.zeros(gray_frame.shape)
    blue_image = np.zeros(gray_frame.shape)

    interior = gray_frame[1:-1, 1:-1]
    blue_image[1:-1, 1:-1] = np.where(
        (blue_spot_min < interior) & (interior < blue_spot_max), 255, 0)
    green_image[1:-1, 1:-1] = np.where(
        (green_spot_min < interior) & (interior < green_spot_max), 255, 0)
    red_image[1:-1, 1:-1] = np.where(
        (red_spot_min < interior) & (interior < red_spot_max), 255, 0)

    red_images.append(red_image)
    green_images.append(green_image)
    blue_images.append(blue_image)

fig = plt.gcf()
fig.show()

# BUG FIX: the original iterated over the tuple
# (red_images, green_images, blue_images), which yields each *list* in turn
# and then fails to unpack it into three names; zip() is required to walk
# the three lists in lockstep, one frame at a time.
for red_gray_image, green_gray_image, blue_gray_image in zip(
        red_images, green_images, blue_images):
    plt.subplot(1, 3, 1)
    plt.imshow(red_gray_image, cmap=plt.get_cmap('gray'))
    plt.axis('off')
    plt.subplot(1, 3, 2)
    plt.imshow(green_gray_image, cmap=plt.get_cmap('gray'))
    plt.axis('off')
    plt.subplot(1, 3, 3)
    plt.imshow(blue_gray_image, cmap=plt.get_cmap('gray'))
    plt.axis('off')
    fig.canvas.draw()
| [
"maumg1196@gmail.com"
] | maumg1196@gmail.com |
bbf89146767efd68bf9de76bb3bbf8a78f5d8c2f | f213549d8725acaf5417d0d5290430d499bf3cf3 | /lino/modlib/polls/models.py | 262a35ec08079cb31380384745d5a2cb3525d5fe | [
"BSD-2-Clause"
] | permissive | ExcellentServ/lino | 56c8159428a451058a35dad75e8799d239c2dc0e | 9ea630e719d47843dd8427dd64db22633626fd3d | refs/heads/master | 2020-12-28T23:15:47.380120 | 2015-01-27T14:53:10 | 2015-01-27T14:53:10 | 29,911,723 | 0 | 0 | null | 2015-01-27T11:44:09 | 2015-01-27T11:44:08 | null | UTF-8 | Python | false | false | 18,518 | py | # -*- coding: UTF-8 -*-
# Copyright 2013-2015 Luc Saffre
# License: BSD (see file COPYING for details)
"""Database models for `lino.modlib.polls`.
.. rubric:: Models overview
A :class:`Poll` is a collection of :class:`Questions <Question>` which
we want to ask repeatedly to different people. Each Question has a
*question text* and a :class:`ChoiceSet`, i.e. a stored ordered set of
possible choices. A :class:`Response` is when somebody answers to a
`Poll`. A Response contains a set of :class:`AnswerChoices
<AnswerChoice>`, each of which represents a given Choice selected by
the questioned person for a given `Question` of the `Poll`. If the
Question is *multiple choice*, then there may be more than one
`AnswerChoice` per `Question`. A `Response` also contains a set of
`AnswerRemarks`, each of which represents a remark written by the
responding person for a given Question of the Poll.
See also :ref:`tested.polly`.
"""
import logging
logger = logging.getLogger(__name__)
from django.db import models
from django.utils.translation import ugettext_lazy as _
from lino.api import dd
from lino import mixins
from lino.utils import join_elems
from lino.utils.xmlgen.html import E
from lino.mixins import Referrable
from lino.modlib.users.mixins import ByUser, UserAuthored
from .utils import ResponseStates, PollStates
# Shortcut to this plugin's configuration object.
config = dd.plugins.polls

# Module-level alias; not referenced elsewhere in this module's visible code.
NullBooleanField = models.NullBooleanField

# Format used to render a numbered question title, e.g. "3) Some question".
NUMBERED_TITLE_FORMAT = "%s) %s"
class ChoiceSet(mixins.BabelNamed):
    """A named, ordered set of choices that can be reused by questions."""

    class Meta:
        verbose_name = _("Choice Set")
        verbose_name_plural = _("Choice Sets")
class ChoiceSets(dd.Table):
    """Table of all choice sets, showing each set's choices in the detail."""
    model = ChoiceSet
    detail_layout = """
    name
    ChoicesBySet
    """
class Choice(mixins.BabelNamed, mixins.Sequenced):
    """One selectable value belonging to a :class:`ChoiceSet`."""

    class Meta:
        verbose_name = _("Choice")
        verbose_name_plural = _("Choices")

    choiceset = models.ForeignKey('polls.ChoiceSet', related_name='choices')

    def get_siblings(self):
        # Sequencing (seqno) is per choice set.
        return self.choiceset.choices.order_by('seqno')

    @dd.action()
    def select_by_response(self, ar):
        mi = ar.master_instance
        dd.logger.info("20140929 %s", mi)
        # When invoked with a Response as master, record this choice
        # as an answer of that response.
        if isinstance(mi, Response):
            AnswerChoice(response=mi, choice=self).save()
class Choices(dd.Table):
    """Table of all choices of all choice sets."""
    model = 'polls.Choice'
class ChoicesBySet(Choices):
    """Slave table: the choices of a given choice set."""
    master_key = 'choiceset'
class Poll(UserAuthored, mixins.CreatedModified, Referrable):
    """A collection of questions to be asked repeatedly to different people."""

    class Meta:
        abstract = dd.is_abstract_model(__name__, 'Poll')
        verbose_name = _("Poll")
        verbose_name_plural = _("Polls")
        ordering = ['created']

    title = models.CharField(_("Title"), max_length=200)
    details = models.TextField(_("Details"), blank=True)

    # Fallback choice set used by questions that define none of their own.
    default_choiceset = models.ForeignKey(
        'polls.ChoiceSet',
        null=True, blank=True,
        related_name='polls',
        verbose_name=_("Default Choiceset"))

    default_multiple_choices = models.BooleanField(
        _("Allow multiple choices"), default=False)

    # Scratch field: text pasted here is turned into Question rows on save
    # (see after_ui_save) and then cleared.
    questions_to_add = models.TextField(
        _("Questions to add"),
        help_text=_("Paste text for questions to add. "
                    "Every non-empty line will create one question."),
        blank=True)

    state = PollStates.field(default=PollStates.draft)

    def __unicode__(self):
        return self.ref or self.title

    def after_ui_save(self, ar):
        # Convert the pasted `questions_to_add` text into Question objects:
        #   "=Text"  starts a heading and resets the numbering,
        #   "#Text"  attaches details to the *previous* question,
        #   any other non-empty line creates a numbered question.
        if self.questions_to_add:
            q = None
            qkw = dict()
            number = 1
            for ln in self.questions_to_add.splitlines():
                ln = ln.strip()
                if ln:
                    if ln.startswith('#'):
                        # NOTE(review): if the very first line starts with
                        # '#', q is still None here -- confirm callers never
                        # paste such input.
                        q.details = ln[1:]
                        q.save()
                        continue
                    elif ln.startswith('='):
                        q = Question(poll=self, title=ln[1:],
                                     is_heading=True, **qkw)
                        number = 1
                    else:
                        q = Question(poll=self, title=ln,
                                     number=str(number), **qkw)
                        number += 1
                    q.full_clean()
                    q.save()
                    # Keep subsequent questions after this one.
                    qkw.update(seqno=q.seqno + 1)
            self.questions_to_add = ''
            self.save()  # save again because we modified afterwards

        super(Poll, self).after_ui_save(ar)

    @dd.virtualfield(dd.HtmlBox(_("Result")))
    def result(self, ar):
        # Rendered by the module-level `get_poll_result` generator.
        return E.div(*tuple(get_poll_result(self)))
def get_poll_result(self):
    """Yield HTML elements summarizing the questions of poll `self`,
    grouped by choice set.

    Module-level helper used by :attr:`Poll.result`; the parameter is
    named `self` because it receives a Poll instance.
    """
    for cs in ChoiceSet.objects.all():
        questions = self.questions.filter(choiceset=cs)
        if questions.count() > 0:
            yield E.h2(unicode(cs))
            for question in questions:
                # BUG FIX: Question has no `text` field (it was removed in
                # favour of `title`), so `question.text` raised
                # AttributeError.  Use the title instead.
                yield E.p(question.title)
class PollDetail(dd.FormLayout):
    """Two-tab detail layout for a Poll: general info and results."""
    main = "general results"

    general = dd.Panel("""
    title state
    details
    user created modified default_choiceset default_multiple_choices
    polls.QuestionsByPoll
    """, label=_("General"))

    results = dd.Panel("""
    polls.ResponsesByPoll
    # result
    PollResult
    """, label=_("Results"))
class Polls(dd.Table):
    """Base table of all polls."""
    model = 'polls.Poll'
    column_names = 'created title user state *'
    detail_layout = PollDetail()
    insert_layout = dd.FormLayout("""
    title
    default_choiceset default_multiple_choices
    questions_to_add
    """, window_size=(60, 15))
class MyPolls(ByUser, Polls):
    """The polls authored by the requesting user."""
    column_names = 'created title state *'
class Question(mixins.Sequenced):
    """One question of a :class:`Poll`."""

    class Meta:
        verbose_name = _("Question")
        verbose_name_plural = _("Questions")

    poll = models.ForeignKey('polls.Poll', related_name='questions')
    title = models.CharField(_("Title"), max_length=200)
    details = models.TextField(_("Details"), blank=True)
    # Display number ("1", "2", ...); headings have none.
    number = models.CharField(_("No."), max_length=20, blank=True)
    choiceset = models.ForeignKey('polls.ChoiceSet', blank=True, null=True)
    # No explicit default: full_clean() falls back to the poll's setting
    # when the value is still None.
    multiple_choices = models.BooleanField(
        _("Allow multiple choices"), blank=True)
    is_heading = models.BooleanField(_("Title"), default=False)

    def __unicode__(self):
        if self.number:
            return NUMBERED_TITLE_FORMAT % (self.number, self.title)
        return self.title

    def get_siblings(self):
        # Sequencing (seqno) is per poll.
        return self.poll.questions.order_by('seqno')

    def get_choiceset(self):
        """Return the effective choice set: None for headings, otherwise
        this question's own set or the poll's default."""
        if self.is_heading:
            return None
        if self.choiceset is None:
            return self.poll.default_choiceset
        return self.choiceset

    def full_clean(self, *args, **kw):
        if self.multiple_choices is None:
            self.multiple_choices = self.poll.default_multiple_choices
        super(Question, self).full_clean()
class Questions(dd.Table):
    """Table of all questions of all polls."""
    model = 'polls.Question'
class QuestionsByPoll(Questions):
    """Slave table: the questions of a given poll."""
    master_key = 'poll'
    column_names = 'title choiceset multiple_choices is_heading'
    auto_fit_column_widths = True
class ToggleChoice(dd.Action):
    """Select or unselect a given choice for a given question of the
    Response on which this action is invoked.
    """
    parameters = dict(
        question=dd.ForeignKey("polls.Question"),
        choice=dd.ForeignKey("polls.Choice"),
    )
    no_params_window = True

    def run_from_ui(self, ar, **kw):
        response = ar.selected_rows[0]
        if response is None:
            return
        pv = ar.action_param_values
        qs = AnswerChoice.objects.filter(response=response, **pv)
        if qs.count() == 1:
            # Already selected: toggling unselects it.
            qs[0].delete()
        elif qs.count() == 0:
            if not pv.question.multiple_choices:
                # delete any other choice which might exist
                qs = AnswerChoice.objects.filter(
                    response=response, question=pv.question)
                qs.delete()
            obj = AnswerChoice(response=response, **pv)
            obj.full_clean()
            obj.save()
        else:
            # More than one row would mean corrupted data.
            raise Exception(
                "Oops: %s returned %d rows." % (qs.query, qs.count()))
        ar.success(refresh=True)
class Response(UserAuthored, mixins.Registrable,
               mixins.CreatedModified):
    """One set of answers given by one user (optionally on behalf of a
    partner) to one poll.
    """

    class Meta:
        verbose_name = _("Response")
        verbose_name_plural = _("Responses")
        ordering = ['created']

    poll = dd.ForeignKey('polls.Poll', related_name='responses')
    state = ResponseStates.field(default=ResponseStates.draft)
    remark = models.TextField(verbose_name=_("My general remark"), blank=True)
    partner = dd.ForeignKey('contacts.Partner', blank=True, null=True)

    # Instance action used by AnswersByResponse.answer_buttons.
    toggle_choice = ToggleChoice()

    @dd.chooser()
    def poll_choices(cls):
        # Only published polls can be responded to.
        return Poll.objects.filter(state=PollStates.published)

    def __unicode__(self):
        return _("%(user)s's response to %(poll)s") % dict(
            user=self.user, poll=self.poll)
class Responses(dd.Table):
    """Base table of responses."""
    model = 'polls.Response'
    detail_layout = """
    user poll state created modified
    polls.AnswersByResponse
    remark
    """
    insert_layout = """
    user
    poll
    """

    @classmethod
    def get_detail_title(self, ar, obj):
        # Say "My response to X" when the viewer is the author.
        txt = _("response to %(poll)s") % dict(poll=obj.poll)
        if obj.user == ar.get_user():
            return _("My %s") % txt
        return _("%(user)s's %(what)s") % dict(user=obj.user, what=txt)
class MyResponses(ByUser, Responses):
    """The responses authored by the requesting user."""
    column_names = 'created poll state remark *'
class ResponsesByPoll(Responses):
    """Slave table: the responses received for a given poll."""
    master_key = 'poll'
    column_names = 'created user state partner remark *'
class ResponsesByPartner(Responses):
    """Slave table: responses linked to a given partner, rendered as a
    compact HTML summary (one list item per poll).
    """
    master_key = 'partner'
    column_names = 'created user state remark *'
    slave_grid_format = 'summary'

    @classmethod
    def get_slave_summary(self, obj, ar):
        if obj is None:
            return
        qs = Response.objects.filter(partner=obj).order_by(
            'poll__ref', 'modified')

        # Group the (sorted) responses by poll, preserving their order.
        polls_with_responses = []
        current = None
        for resp in qs:
            if current is None:
                current = (resp.poll, [])
            if resp.poll != current[0]:
                polls_with_responses.append(current)
                current = (resp.poll, [])
            current[1].append(resp)
        if current is not None:
            polls_with_responses.append(current)

        # One <li> per poll: "Poll : date1, date2, ..." with each date
        # linking to the corresponding response.
        items = []
        for poll, responses in polls_with_responses:
            elems = [unicode(poll), ' : ']
            elems += join_elems(
                [ar.obj2html(r, dd.fds(r.modified))
                 for r in responses], sep=', ')
            items.append(E.li(*elems))

        return E.div(E.ul(*items))
class AnswerChoice(dd.Model):
    """The fact that a given response selected a given choice for a given
    question.  Multiple-choice questions may have several rows per question.
    """

    class Meta:
        verbose_name = _("Answer Choice")
        verbose_name_plural = _("Answer Choices")
        ordering = ['question__seqno']

    response = models.ForeignKey('polls.Response')
    question = models.ForeignKey('polls.Question')
    choice = models.ForeignKey(
        'polls.Choice',
        related_name='answers', verbose_name=_("My answer"),
        blank=True, null=True)

    @dd.chooser()
    def choice_choices(cls, question):
        # Offer only the choices of the question's effective choice set.
        return question.get_choiceset().choices.all()
class AnswerChoices(dd.Table):
    """Table of all answer choices."""
    model = 'polls.AnswerChoice'
class AnswerRemark(dd.Model):
    """A free-text remark written for one question of one response."""

    class Meta:
        verbose_name = _("Answer Remark")
        verbose_name_plural = _("Answer Remarks")
        ordering = ['question__seqno']

    response = models.ForeignKey('polls.Response')
    question = models.ForeignKey('polls.Question')
    remark = models.TextField(_("My remark"), blank=True)
class AnswerRemarks(dd.Table):
    """Table of all answer remarks.

    BUG FIX: `model` must name the *model* class `polls.AnswerRemark`; the
    original value 'polls.AnswerRemarks' named this table class itself and
    could not resolve to a model.
    """
    model = 'polls.AnswerRemark'
# Attribute names that the volatile Answer object borrows from its Question
# so it can be handled like a database row by the table machinery.
FORWARD_TO_QUESTION = tuple("full_clean after_ui_save disable_delete".split())
class Answer(object):
    """Volatile object to represent the one and only answer to a given
    question in a given response.

    Not a database model: rows of :class:`AnswersByResponse` are built
    on the fly from the stored AnswerRemark and AnswerChoice rows.
    """

    def __init__(self, response, question):
        self.response = response
        self.question = question
        # Use the question's pk so the virtual table can address this row.
        self.pk = self.id = question.pk
        try:
            self.remark = AnswerRemark.objects.get(
                question=question, response=response)
        except AnswerRemark.DoesNotExist:
            # Unsaved placeholder so `remark` is always present.
            self.remark = AnswerRemark(question=question, response=response)

        self.choices = AnswerChoice.objects.filter(
            question=question, response=response)
        # Forward selected model-protocol methods to the question.
        for k in FORWARD_TO_QUESTION:
            setattr(self, k, getattr(question, k))
class AnswerRemarkField(dd.VirtualField):
    """An editable virtual field which reads and writes the `remark` of
    the AnswerRemark behind an :class:`Answer` row.
    """
    editable = True

    def __init__(self):
        t = models.TextField(_("My remark"), blank=True)
        dd.VirtualField.__init__(self, t, None)

    def set_value_in_object(self, ar, obj, value):
        # `obj` is an Answer; persist the remark immediately.
        obj.remark.remark = value
        obj.remark.save()

    def value_from_object(self, obj, ar):
        return obj.remark.remark
class AnswersByResponse(dd.VirtualTable):
    """This is the table used for answering to a poll.

    One virtual row per Question of the response's poll.

    .. attribute:: answer_buttons

    A virtual field that displays the currently selected answer(s) for
    this question, eventually (if editing is permitted) together with
    buttons to modify the selection.
    """
    label = _("Answers")
    editable = True
    master = 'polls.Response'
    column_names = 'question:40 answer_buttons:30 remark:20 *'
    variable_row_height = True
    auto_fit_column_widths = True

    remark = AnswerRemarkField()

    @classmethod
    def get_pk_field(self):
        # Rows are keyed by the Question's primary key.
        return Question._meta.pk

    @classmethod
    def get_row_by_pk(self, ar, pk):
        response = ar.master_instance
        q = Question.objects.get(pk=pk)
        return Answer(response, q)

    @classmethod
    def get_row_permission(cls, obj, ar, state, ba):
        return True

    @classmethod
    def disable_delete(self, obj, ar):
        # Virtual rows cannot be deleted.
        return "Not deletable"

    @classmethod
    def get_data_rows(self, ar):
        response = ar.master_instance
        if response is None:
            return
        for q in Question.objects.filter(poll=response.poll):
            yield Answer(response, q)

    @dd.displayfield(_("Question"))
    def question(self, obj, ar):
        # Headings are rendered bold; numbered questions get their number.
        if obj.question.number:
            txt = NUMBERED_TITLE_FORMAT % (
                obj.question.number, obj.question.title)
        else:
            txt = obj.question.title
        if obj.question.is_heading:
            txt = E.b(txt)
        return E.p(txt)

    @dd.displayfield(_("My answer"))
    def answer_buttons(self, obj, ar):
        # One ToggleChoice button per choice; the selected one(s) shown
        # in bold brackets.
        l = []
        pv = dict(question=obj.question)
        ia = obj.response.toggle_choice
        cs = obj.question.get_choiceset()
        if cs is None:
            return ''
        for c in cs.choices.all():
            # NOTE(review): `pv` is mutated and reused for every button;
            # this relies on instance_action_button serializing the values
            # immediately -- confirm.
            pv.update(choice=c)
            text = unicode(c)
            qs = AnswerChoice.objects.filter(response=obj.response, **pv)
            if qs.count() == 1:
                # Currently selected choice.
                text = [E.b('[', text, ']')]
            elif qs.count() == 0:
                pass
            else:
                raise Exception(
                    "Oops: %s returned %d rows." % (qs.query, qs.count()))
            request_kwargs = dict(action_param_values=pv)
            e = ar.instance_action_button(
                ia, text, request_kwargs=request_kwargs,
                style="text-decoration:none")
            l.append(e)
        return E.p(*join_elems(l))
class PollResult(Questions):
    """Shows a summary of responses to this poll."""
    master_key = 'poll'
    column_names = "question choiceset answers a1"

    @classmethod
    def get_data_rows(self, ar):
        poll = ar.master_instance
        if poll is None:
            return
        for obj in super(PollResult, self).get_request_queryset(ar):
            yield obj

    @dd.virtualfield(dd.ForeignKey('polls.Question'))
    def question(self, obj, ar):
        return obj

    @dd.requestfield(_("#Answers"))
    def answers(self, obj, ar):
        # Number of AnswerChoice rows for this question, over all responses.
        return AnswerChoices.request(known_values=dict(question=obj))

    @dd.requestfield(_("A1"))
    def a1(self, obj, ar):
        # Answers giving the *first* choice of the question's choice set.
        # NOTE(review): `iter(...).next()` is Python 2 syntax (this module
        # also uses `__unicode__`) and assumes a non-empty choice set.
        c = iter(obj.get_choiceset().choices.all()).next()
        return AnswerChoices.request(
            known_values=dict(question=obj, choice=c))
#~
#~ @dd.receiver(dd.database_ready)
#~ def on_database_ready(sender,**kw):
#~ """
#~ Builds columns dynamically from the :class:`PersonGroup` database table.
#~
#~ This must also be called before each test case.
#~ """
#~ self = PollResult
#~ self.column_names = 'seqno text'
#~ for obj in Questions.objects.filter(ref_name__isnull=False).order_by('ref_name'):
#~ def w(pg):
# ~ # we must evaluate `today` for each request, not only once when `database_ready`
#~ today = datetime.date.today()
#~ def func(self,obj,ar):
#~ return Clients.request(
#~ param_values=dict(group=pg,
#~ coached_by=obj,start_date=today,end_date=today))
#~ return func
#~ vf = dd.RequestField(w(pg),verbose_name=pg.name)
#~ self.add_virtual_field('G'+pg.ref_name,vf)
#~ self.column_names += ' ' + vf.name
#~
#~ self.column_names += ' primary_clients active_clients row_total'
# ~ self.clear_handle() # avoid side effects when running multiple test cases
#~ settings.SITE.resolve_virtual_fields()
# Register this plugin's user group so its tables appear in the user
# interface under the plugin's verbose name.
dd.add_user_group('polls', config.verbose_name)
| [
"luc.saffre@gmail.com"
] | luc.saffre@gmail.com |
2c2bd4e0b8e732d00061cbe4b692a413cc6990e2 | 8850f01a5b61fad9175cb7246a4c7f3c6b455b4d | /tests/test_frameDict.py | b4edcc9edbca205d296e4e7f5c75104595bacb87 | [] | no_license | lsst/astshim | 26134d1f604e6c892040c99e71217ac833110a3d | 3273194fdb8a9e75327a71b15298448ff22f26ca | refs/heads/main | 2023-09-04T09:58:40.617680 | 2023-07-10T00:10:23 | 2023-07-10T00:10:23 | 64,975,235 | 2 | 18 | null | 2023-04-05T17:10:43 | 2016-08-05T00:07:02 | C++ | UTF-8 | Python | false | false | 18,058 | py | import unittest
import numpy as np
from numpy.testing import assert_allclose
import astshim as ast
from astshim.test import MappingTestCase
from astshim.detail.testUtils import makeFrameDict
class TestFrameDict(MappingTestCase):
def setUp(self):
self.frame1 = ast.Frame(2, "Domain=frame1, Ident=f1")
self.frame2 = ast.Frame(2, "Domain=frame2, Ident=f2")
self.zoom = 1.5
self.zoomMap = ast.ZoomMap(2, self.zoom, "Ident=zoomMap")
self.initialNumFrames = self.frame1.getNObject() # may be >2 when run using pytest
self.initialNumZoomMap = self.zoomMap.getNObject() # may be > 1 when run using pytest
def checkDict(self, frameDict):
for index in range(1, frameDict.nFrame + 1):
domain = frameDict.getFrame(index).domain
self.assertEqual(frameDict.getIndex(domain), index)
self.assertEqual(frameDict.getFrame(domain).domain, domain)
    def test_FrameDictOneFrameConstructor(self):
        """Test the single-frame constructor and basic domain/index lookup."""
        frameDict = ast.FrameDict(self.frame1)
        self.assertIsInstance(frameDict, ast.FrameDict)
        self.assertEqual(frameDict.nFrame, 1)
        self.assertEqual(frameDict.getAllDomains(), {"FRAME1"})
        self.assertEqual(frameDict.getIndex("frame1"), 1)  # should be case blind
        with self.assertRaises(IndexError):
            frameDict.getIndex("missingDomain")
        with self.assertRaises(IndexError):
            frameDict.getIndex("")

        # Make sure the frame is deep copied
        self.frame1.domain = "NEWDOMAIN"
        self.assertEqual(frameDict.getFrame("FRAME1").domain, "FRAME1")
        self.assertEqual(frameDict.getFrame(frameDict.BASE).domain, "FRAME1")
        self.assertEqual(self.frame1.getRefCount(), 1)
        self.assertEqual(self.frame1.getNObject(), self.initialNumFrames + 1)

        # make sure BASE and CURRENT are available on the class and instance
        self.assertEqual(ast.FrameDict.BASE, frameDict.BASE)
        self.assertEqual(ast.FrameDict.CURRENT, frameDict.CURRENT)

        self.checkCopy(frameDict)
        indata = np.array([
            [0.0, 0.1, -1.5],
            [5.1, 0.0, 3.1],
        ])
        self.checkMappingPersistence(frameDict, indata)
        self.checkPersistence(frameDict, typeFromChannel=ast.FrameSet)
        self.checkDict(frameDict)
    def test_FrameDictFrameSetConstructor(self):
        """Test constructing a FrameDict from an existing FrameSet."""
        frameSet = ast.FrameSet(self.frame1, self.zoomMap, self.frame2)
        frameDict = ast.FrameDict(frameSet)
        indata = np.array([[1.1, 2.1, 3.1], [1.2, 2.2, 3.2]])
        predictedOut = indata * self.zoom
        assert_allclose(frameDict.applyForward(indata), predictedOut)
        assert_allclose(frameDict.applyInverse(predictedOut), indata)

        # makeFrameDict (C++ helper) must return a fresh object.
        frameDict2 = makeFrameDict(frameSet)
        self.assertEqual(frameDict2.getRefCount(), 1)
    def test_FrameDictAddFrame(self):
        """Test addFrame, deep-copy semantics and duplicate-domain rejection."""
        frameDict = ast.FrameDict(self.frame1)
        self.assertEqual(self.frame1.getNObject(), self.initialNumFrames + 1)
        frameDict.addFrame(1, self.zoomMap, self.frame2)
        self.assertEqual(frameDict.nFrame, 2)
        self.assertEqual(frameDict.getFrame("FRAME2").domain, "FRAME2")
        self.assertEqual(frameDict.getFrame(frameDict.CURRENT).domain, "FRAME2")
        self.assertEqual(frameDict.getAllDomains(), {"FRAME1", "FRAME2"})
        self.assertEqual(self.frame2.getRefCount(), 1)
        self.assertEqual(self.frame1.getNObject(), self.initialNumFrames + 2)

        # make sure all objects were deep copied
        self.frame1.domain = "newBase"
        self.zoomMap.ident = "newMapping"
        self.frame2.domain = "newCurrent"
        self.assertEqual(frameDict.getFrame(frameDict.BASE).domain, "FRAME1")
        self.assertEqual(frameDict.getFrame(frameDict.CURRENT).domain, "FRAME2")
        self.assertEqual(frameDict.getMapping().ident, "zoomMap")
        self.checkPersistence(frameDict, typeFromChannel=ast.FrameSet)
        self.checkDict(frameDict)

        # make sure we can't add a frame with a duplicate domain name
        # and that attempting to do so leave the FrameDict unchanged
        duplicateFrame = ast.Frame(2, "Domain=FRAME1, Ident=duplicate")
        with self.assertRaises(ValueError):
            frameDict.addFrame(1, self.zoomMap, duplicateFrame)
        self.assertEqual(frameDict.getAllDomains(), {"FRAME1", "FRAME2"})
        self.assertEqual(frameDict.getFrame("FRAME1").ident, "f1")
        self.checkDict(frameDict)
    def test_FrameDictFrameMappingFrameConstructor(self):
        """Test the (frame, mapping, frame) constructor."""
        frameDict = ast.FrameDict(self.frame1, self.zoomMap, self.frame2)
        self.assertEqual(frameDict.nFrame, 2)
        self.assertEqual(frameDict.base, 1)
        self.assertEqual(frameDict.getIndex("FRAME1"), 1)
        self.assertEqual(frameDict.current, 2)
        self.assertEqual(frameDict.getIndex("frame2"), 2)
        self.assertEqual(frameDict.getAllDomains(), {"FRAME1", "FRAME2"})
        self.assertEqual(self.frame1.getNObject(), self.initialNumFrames + 2)
        self.assertEqual(self.zoomMap.getNObject(), self.initialNumZoomMap + 1)

        # make sure all objects were deep copied
        self.frame1.domain = "newBase"
        self.zoomMap.ident = "newMapping"
        self.frame2.domain = "newCurrent"
        self.assertEqual(frameDict.getFrame(frameDict.BASE).domain, "FRAME1")
        self.assertEqual(frameDict.getFrame(frameDict.CURRENT).domain, "FRAME2")
        self.assertEqual(frameDict.getMapping().ident, "zoomMap")
        self.checkPersistence(frameDict, typeFromChannel=ast.FrameSet)
        self.checkDict(frameDict)
    def test_FrameDictGetMapping(self):
        """Test getMapping by index and domain, deep copies and errors."""
        frameDict = ast.FrameDict(self.frame1, self.zoomMap, self.frame2)
        # make sure the zoomMap in frameDict is a deep copy of self.zoomMap
        self.zoomMap.ident = "newMappingIdent"
        zoomMapList = (  # all should be the same
            frameDict.getMapping(frameDict.BASE, frameDict.CURRENT),
            frameDict.getMapping("FRAME1", "FRAME2"),
            frameDict.getMapping(frameDict.BASE, "frame2"),
            frameDict.getMapping("frame1", frameDict.CURRENT),
        )
        for zoomMap in zoomMapList:
            self.assertEqual(zoomMap.ident, "zoomMap")
        self.assertEqual(self.zoomMap.getRefCount(), 1)

        # make sure the zoomMapList are retrieved in the right direction
        indata = np.array([[1.1, 2.1, 3.1], [1.2, 2.2, 3.2]])
        predictedOut = indata * self.zoom
        for zoomMap in zoomMapList:
            assert_allclose(zoomMap.applyForward(indata), predictedOut)

        # check that getMapping returns a deep copy
        for i, zoomMap in enumerate(zoomMapList):
            zoomMap.ident = "newIdent%s" % (i,)
            self.assertEqual(zoomMap.getRefCount(), 1)
        self.assertEqual(frameDict.getMapping().ident, "zoomMap")
        # 5 = 1 in frameDict plus 4 retrieved copies in zoomMapList
        self.assertEqual(self.zoomMap.getNObject(), self.initialNumZoomMap + 5)
        self.checkDict(frameDict)

        # try to get invalid frames by name and index; test all combinations
        # of the "from" and "to" index being valid or invalid
        indexIsValidList = (
            (1, True),
            (3, False),
            ("Frame1", True),
            ("BadFrame", False),
            ("", False),
        )
        for fromIndex, fromValid in indexIsValidList:
            for toIndex, toValid in indexIsValidList:
                if fromValid and toValid:
                    mapping = frameDict.getMapping(fromIndex, toIndex)
                    self.assertIsInstance(mapping, ast.Mapping)
                else:
                    with self.assertRaises((IndexError, RuntimeError)):
                        frameDict.getMapping(fromIndex, toIndex)

        # make sure the errors did not mess up the FrameDict
        self.assertEqual(frameDict.getAllDomains(), {"FRAME1", "FRAME2"})
        self.checkDict(frameDict)
    def test_FrameDictRemoveFrame(self):
        """Test removing frames by domain name and by index."""
        frameDict = ast.FrameDict(self.frame1, self.zoomMap, self.frame2)
        zoomMap2 = ast.ZoomMap(2, 1.3, "Ident=zoomMap2")
        frame3 = ast.Frame(2, "Domain=FRAME3, Ident=f3")
        frameDict.addFrame(2, zoomMap2, frame3)
        self.assertEqual(frameDict.getAllDomains(), {"FRAME1", "FRAME2", "FRAME3"})
        self.assertEqual(frameDict.getIndex("FRAME1"), 1)
        self.assertEqual(frameDict.getIndex("FRAME2"), 2)
        self.assertEqual(frameDict.getIndex("FRAME3"), 3)
        self.assertEqual(self.frame1.getNObject(), self.initialNumFrames + 4)
        self.assertEqual(self.zoomMap.getNObject(), self.initialNumZoomMap + 3)

        # remove the frame named "FRAME1" by name
        # this will also remove one of the two zoom maps
        frameDict.removeFrame("FRAME1")
        self.checkDict(frameDict)
        self.assertEqual(frameDict.getAllDomains(), {"FRAME2", "FRAME3"})
        self.assertEqual(frameDict.nFrame, 2)
        # remaining frames are renumbered
        self.assertEqual(frameDict.getIndex("FRAME2"), 1)
        self.assertEqual(frameDict.getIndex("FRAME3"), 2)
        self.assertEqual(frameDict.getFrame("FRAME2").domain, "FRAME2")
        self.assertEqual(frameDict.getFrame("FRAME3").domain, "FRAME3")
        self.assertEqual(self.frame1.getNObject(), self.initialNumFrames + 3)
        self.assertEqual(self.zoomMap.getNObject(), self.initialNumZoomMap + 2)

        # remove the frame "FRAME3" by index
        # this will also remove the remaining zoom map
        frameDict.removeFrame(2)
        self.checkDict(frameDict)
        self.assertEqual(frameDict.getAllDomains(), {"FRAME2"})
        self.assertEqual(frameDict.nFrame, 1)
        self.assertEqual(frameDict.getIndex("FRAME2"), 1)
        self.assertEqual(frameDict.getFrame("FRAME2").domain, "FRAME2")
        self.assertEqual(self.frame1.getNObject(), self.initialNumFrames + 2)
        self.assertEqual(self.zoomMap.getNObject(), self.initialNumZoomMap + 1)
        frameDeep = frameDict.getFrame(1)
        self.assertEqual(frameDeep.domain, "FRAME2")

        # it is not allowed to remove the last frame
        with self.assertRaises(RuntimeError):
            frameDict.removeFrame(1)
        self.checkDict(frameDict)
    def test_FrameDictGetFrameAndGetIndex(self):
        """Test getFrame and getIndex with valid and invalid keys."""
        frameDict = ast.FrameDict(self.frame1, self.zoomMap, self.frame2)
        self.assertEqual(frameDict.getIndex("frame1"), 1)
        self.assertEqual(frameDict.getFrame(1).domain, "FRAME1")
        self.assertEqual(frameDict.getFrame(frameDict.BASE).domain, "FRAME1")
        self.assertEqual(frameDict.getFrame("FRAME1").domain, "FRAME1")
        self.assertEqual(frameDict.getIndex("frame2"), 2)
        self.assertEqual(frameDict.getFrame(2).domain, "FRAME2")
        self.assertEqual(frameDict.getFrame(frameDict.CURRENT).domain, "FRAME2")
        self.assertEqual(frameDict.getFrame("FRAME2").domain, "FRAME2")

        # test on invalid indices: bad domains raise IndexError,
        # out-of-range integer indices raise RuntimeError
        for badDomain in ("badName", ""):
            with self.assertRaises(IndexError):
                frameDict.getFrame(badDomain)
            with self.assertRaises(IndexError):
                frameDict.getIndex(badDomain)
        with self.assertRaises(RuntimeError):
            frameDict.getFrame(3)

        # make sure the errors did not mess up the FrameDict
        self.assertEqual(frameDict.getAllDomains(), {"FRAME1", "FRAME2"})
        self.checkDict(frameDict)
    def test_FrameDictRemapFrame(self):
        """remapFrame must accept either an integer index or a domain name.

        After remapping frame 1 with a shift map, the forward transform
        becomes shift-then-zoom, as checked by ``predictedOut2`` below.
        """
        for useDomainForRemapFrame in (False, True):
            frameDict = ast.FrameDict(self.frame1, self.zoomMap, self.frame2)
            indata = np.array([
                [0.0, 0.1, -1.5],
                [5.1, 0.0, 3.1],
            ])
            # Before remapping the forward mapping is a pure zoom.
            predictedOut1 = indata * self.zoom
            assert_allclose(frameDict.applyForward(indata), predictedOut1)
            assert_allclose(frameDict.applyInverse(predictedOut1), indata)
            self.checkMappingPersistence(frameDict, indata)

            shift = (0.5, -1.5)
            shiftMap = ast.ShiftMap(shift, "Ident=shift")
            initialNumShiftMap = shiftMap.getNObject()
            self.assertEqual(self.zoomMap.getNObject(), self.initialNumZoomMap + 1)
            if useDomainForRemapFrame:
                frameDict.remapFrame("FRAME1", shiftMap)
            else:
                frameDict.remapFrame(1, shiftMap)
            # Object counts: remapping adds exactly one ShiftMap instance and
            # no additional ZoomMap instances.
            self.assertEqual(self.zoomMap.getNObject(), self.initialNumZoomMap + 1)
            self.assertEqual(shiftMap.getNObject(), initialNumShiftMap + 1)
            # The shift is applied to the input coordinates before the zoom.
            predictedOut2 = (indata.T - shift).T * self.zoom
            assert_allclose(frameDict.applyForward(indata), predictedOut2)
            assert_allclose(frameDict.applyInverse(predictedOut2), indata)
def test_FrameDictPermutationSkyFrame(self):
"""Test permuting FrameDict axes using a SkyFrame
Permuting the axes of the current frame of a frame set
*in situ* (by calling `permAxes` on the frame set itself)
should update the connected mappings.
"""
# test with arbitrary values that will not be wrapped by SkyFrame
x = 0.257
y = 0.832
frame1 = ast.Frame(2)
unitMap = ast.UnitMap(2)
frame2 = ast.SkyFrame()
frameDict = ast.FrameDict(frame1, unitMap, frame2)
self.assertAlmostEqual(frameDict.applyForward([x, y]), [x, y])
self.assertAlmostEqual(frameDict.applyInverse([x, y]), [x, y])
# permuting the axes of the current frame also permutes the mapping
frameDict.permAxes([2, 1])
self.assertAlmostEqual(frameDict.applyForward([x, y]), [y, x])
self.assertAlmostEqual(frameDict.applyInverse([x, y]), [y, x])
# permuting again puts things back
frameDict.permAxes([2, 1])
self.assertAlmostEqual(frameDict.applyForward([x, y]), [x, y])
self.assertAlmostEqual(frameDict.applyInverse([x, y]), [x, y])
    def test_FrameDictPermutationUnequal(self):
        """Test permuting FrameDict axes with nIn != nOut

        Permuting the axes of the current frame of a frame set
        *in situ* (by calling `permAxes` on the frame set itself)
        should update the connected mappings.

        Make nIn != nOut in order to test DM-9899
        FrameDict.permAxes would fail if nIn != nOut
        """
        # Initial mapping: 3 inputs, 2 outputs: 1-1, 2-2, 3=z
        # Test using arbitrary values for x,y,z
        x = 75.1
        y = -53.2
        z = 0.123
        frame1 = ast.Frame(3)
        # The PermMap drops the third axis going forward and restores the
        # constant z going backward.
        permMap = ast.PermMap([1, 2, -1], [1, 2], [z])
        frame2 = ast.Frame(2)
        frameDict = ast.FrameDict(frame1, permMap, frame2)
        self.assertAlmostEqual(frameDict.applyForward([x, y, z]), [x, y])
        self.assertAlmostEqual(frameDict.applyInverse([x, y]), [x, y, z])

        # permuting the axes of the current frame also permutes the mapping
        frameDict.permAxes([2, 1])
        self.assertAlmostEqual(frameDict.applyForward([x, y, z]), [y, x])
        self.assertAlmostEqual(frameDict.applyInverse([x, y]), [y, x, z])

        # permuting again puts things back
        frameDict.permAxes([2, 1])
        self.assertAlmostEqual(frameDict.applyForward([x, y, z]), [x, y])
        self.assertAlmostEqual(frameDict.applyInverse([x, y]), [x, y, z])
    def test_FrameDictSetBaseCurrent(self):
        """setBase/setCurrent change which frames the forward mapping connects."""
        frameDict = ast.FrameDict(self.frame1, self.zoomMap, self.frame2)
        # Initially frame 1 is the base and frame 2 is the current frame.
        self.assertEqual(frameDict.base, 1)
        self.assertEqual(frameDict.current, 2)
        self.assertEqual(frameDict.getIndex("frame1"), 1)
        self.assertEqual(frameDict.getIndex("frame2"), 2)

        indata = np.array([
            [0.0, 0.1, -1.5],
            [5.1, 0.0, 3.1],
        ])
        # base -> current is the zoom mapping.
        predictedOut1 = indata.copy() * self.zoom
        assert_allclose(frameDict.applyForward(indata), predictedOut1)

        # With base == current the forward mapping is the identity.
        frameDict.setCurrent("FRAME1")
        self.assertEqual(frameDict.base, 1)
        self.assertEqual(frameDict.current, 1)
        self.assertEqual(frameDict.getIndex("FRAME1"), 1)
        self.assertEqual(frameDict.getIndex("FRAME2"), 2)
        predictedOutput2 = indata.copy()
        assert_allclose(frameDict.applyForward(indata), predictedOutput2)

        # With base and current swapped, the forward mapping is the inverse zoom.
        frameDict.setBase("FRAME2")
        self.assertEqual(frameDict.base, 2)
        self.assertEqual(frameDict.current, 1)
        self.assertEqual(frameDict.getIndex("FRAME1"), 1)
        self.assertEqual(frameDict.getIndex("FRAME2"), 2)
        predictedOutput3 = indata.copy() / self.zoom
        assert_allclose(frameDict.applyForward(indata), predictedOutput3)
    def test_FrameDictSetDomain(self):
        """setDomain renames the current frame's domain; duplicates are rejected."""
        frameDict = ast.FrameDict(self.frame1, self.zoomMap, self.frame2)
        # Rename frame 1's domain; lookups remain case-insensitive.
        frameDict.setCurrent("FRAME1")
        frameDict.setDomain("NEWFRAME1")
        self.assertEqual(frameDict.getAllDomains(), {"NEWFRAME1", "FRAME2"})
        self.assertEqual(frameDict.getIndex("newFrame1"), 1)
        self.assertEqual(frameDict.getIndex("FRAME2"), 2)

        # Rename frame 2's domain.
        frameDict.setCurrent("FRAME2")
        frameDict.setDomain("NEWFRAME2")
        self.assertEqual(frameDict.getAllDomains(), {"NEWFRAME1", "NEWFRAME2"})
        self.assertEqual(frameDict.getIndex("NEWFRAME1"), 1)
        self.assertEqual(frameDict.getIndex("NEWFRAME2"), 2)

        # Renaming a domain to itself should have no effect
        self.assertEqual(frameDict.getFrame(frameDict.CURRENT).domain, "NEWFRAME2")
        frameDict.setDomain("NEWFRAME2")
        self.assertEqual(frameDict.getFrame(frameDict.CURRENT).domain, "NEWFRAME2")
        self.assertEqual(frameDict.getAllDomains(), {"NEWFRAME1", "NEWFRAME2"})

        # Make sure setDomain cannot be used to rename a domain to a duplicate
        # and that this leaves the frameDict unchanged
        self.assertEqual(frameDict.getFrame(frameDict.CURRENT).domain, "NEWFRAME2")
        with self.assertRaises(ValueError):
            frameDict.setDomain("NEWFRAME1")
        self.assertEqual(frameDict.getFrame(frameDict.CURRENT).domain, "NEWFRAME2")
        self.assertEqual(frameDict.getAllDomains(), {"NEWFRAME1", "NEWFRAME2"})
# Run the test suite when this file is executed directly.
if __name__ == "__main__":
    unittest.main()
| [
"rowen@uw.edu"
] | rowen@uw.edu |
b43b265ab5b8ec038ca93e7960531a9ce7c8c0a4 | 2aace9bb170363e181eb7520e93def25f38dbe5c | /build/idea-sandbox/system/python_stubs/-57053121/cv2/cv2/face_FacemarkKazemi.py | 73b898f65569c4285d3408241b623d11da0c7620 | [] | no_license | qkpqkp/PlagCheck | 13cb66fd2b2caa2451690bb72a2634bdaa07f1e6 | d229904674a5a6e46738179c7494488ca930045e | refs/heads/master | 2023-05-28T15:06:08.723143 | 2021-06-09T05:36:34 | 2021-06-09T05:36:34 | 375,235,940 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,957 | py | # encoding: utf-8
# module cv2.cv2
# from C:\Users\Doly\Anaconda3\lib\site-packages\cv2\cv2.cp37-win_amd64.pyd
# by generator 1.147
""" Python wrapper for OpenCV. """
# imports
import cv2.cv2 as # C:\Users\Doly\Anaconda3\lib\site-packages\cv2\cv2.cp37-win_amd64.pyd
import cv2.Error as Error # <module 'cv2.Error'>
import cv2.aruco as aruco # <module 'cv2.aruco'>
import cv2.bgsegm as bgsegm # <module 'cv2.bgsegm'>
import cv2.bioinspired as bioinspired # <module 'cv2.bioinspired'>
import cv2.cuda as cuda # <module 'cv2.cuda'>
import cv2.datasets as datasets # <module 'cv2.datasets'>
import cv2.detail as detail # <module 'cv2.detail'>
import cv2.dnn as dnn # <module 'cv2.dnn'>
import cv2.face as face # <module 'cv2.face'>
import cv2.fisheye as fisheye # <module 'cv2.fisheye'>
import cv2.flann as flann # <module 'cv2.flann'>
import cv2.ft as ft # <module 'cv2.ft'>
import cv2.hfs as hfs # <module 'cv2.hfs'>
import cv2.img_hash as img_hash # <module 'cv2.img_hash'>
import cv2.instr as instr # <module 'cv2.instr'>
import cv2.ipp as ipp # <module 'cv2.ipp'>
import cv2.kinfu as kinfu # <module 'cv2.kinfu'>
import cv2.line_descriptor as line_descriptor # <module 'cv2.line_descriptor'>
import cv2.linemod as linemod # <module 'cv2.linemod'>
import cv2.ml as ml # <module 'cv2.ml'>
import cv2.motempl as motempl # <module 'cv2.motempl'>
import cv2.multicalib as multicalib # <module 'cv2.multicalib'>
import cv2.ocl as ocl # <module 'cv2.ocl'>
import cv2.ogl as ogl # <module 'cv2.ogl'>
import cv2.omnidir as omnidir # <module 'cv2.omnidir'>
import cv2.optflow as optflow # <module 'cv2.optflow'>
import cv2.plot as plot # <module 'cv2.plot'>
import cv2.ppf_match_3d as ppf_match_3d # <module 'cv2.ppf_match_3d'>
import cv2.quality as quality # <module 'cv2.quality'>
import cv2.reg as reg # <module 'cv2.reg'>
import cv2.rgbd as rgbd # <module 'cv2.rgbd'>
import cv2.saliency as saliency # <module 'cv2.saliency'>
import cv2.samples as samples # <module 'cv2.samples'>
import cv2.structured_light as structured_light # <module 'cv2.structured_light'>
import cv2.text as text # <module 'cv2.text'>
import cv2.utils as utils # <module 'cv2.utils'>
import cv2.videoio_registry as videoio_registry # <module 'cv2.videoio_registry'>
import cv2.videostab as videostab # <module 'cv2.videostab'>
import cv2.xfeatures2d as xfeatures2d # <module 'cv2.xfeatures2d'>
import cv2.ximgproc as ximgproc # <module 'cv2.ximgproc'>
import cv2.xphoto as xphoto # <module 'cv2.xphoto'>
import cv2 as __cv2
class face_FacemarkKazemi(__cv2.face_Facemark):
    # Auto-generated IDE stub for cv2.face.FacemarkKazemi; the method bodies
    # are placeholders -- the real implementations live in the compiled
    # OpenCV extension module.
    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    @staticmethod # known case of __new__
    def __new__(*args, **kwargs): # real signature unknown
        """ Create and return a new object.  See help(type) for accurate signature. """
        pass

    def __repr__(self, *args, **kwargs): # real signature unknown
        """ Return repr(self). """
        pass
| [
"qinkunpeng2015@163.com"
] | qinkunpeng2015@163.com |
ac4251061dbf04a4b211f6dc6b24aac16bd3c392 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adjectives/_misbegotten.py | 0a2f1ba98baecb49da1a281a971696f07f881ffd | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 463 | py |
#calss header
class _MISBEGOTTEN():
def __init__(self,):
self.name = "MISBEGOTTEN"
self.definitions = [u'badly or stupidly planned or designed: ', u'not deserving to be respected or thought valuable: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'adjectives'
def run(self, obj1, obj2):
self.jsondata[obj2] = {}
self.jsondata[obj2]['properties'] = self.name.lower()
return self.jsondata
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
17ada15b2b7a85d902eab53dcc44a0434d73b4ab | c7169415ae8abedd29ab83cddbcccb6768663062 | /image_tagging/dataset_a/twitter/predict_twitter_background.py | cc690218544bfe7ee85106a43155e431a16a7061 | [] | no_license | chrisWWU/cross_platform_feature_analysis | 26c33dd2adc00b7d8fbc24bfef45d6757b81ae1a | 572732554c73bdcb22f31bce5718fdf8beb77bd8 | refs/heads/master | 2022-12-29T11:11:18.910805 | 2020-10-16T11:38:04 | 2020-10-16T11:38:04 | 296,666,366 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,277 | py | from imageai.Prediction import ImagePrediction
import os
import pandas as pd
from PIL import Image
def clear_valid_path(path):
    """Return the 8th '/'-separated segment of `path`, with any '.jpg' removed.

    For profile-picture paths of the depth produced in this module, that
    segment is the file name encoding the Twitter username.
    """
    return path.split('/')[7].replace('.jpg', '')
def get_image_tags(path_from, path_to, csv):
    """Predict image tags for every profile picture in `path_from`.

    Loads a ResNet predictor (weights at the module-level `path_model`),
    runs a top-5 prediction on every readable .jpg/.png file and collects
    the results into a DataFrame with columns
    ['prediction', 'percentage', 'twitterusername'].

    Args:
        path_from: directory containing the profile pictures.
        path_to: CSV output path, used only when `csv` is truthy.
        csv: if truthy, write the resulting DataFrame to `path_to`.
    """
    # set up model
    multiple_prediction = ImagePrediction()
    multiple_prediction.setModelTypeAsResNet()
    multiple_prediction.setModelPath(os.path.join(path_model))
    multiple_prediction.loadModel()

    # only keep files that look like actual photos
    all_images_array = [
        name for name in os.listdir(path_from)
        if name.endswith(".jpg") or name.endswith(".png")
    ]

    # create path for each pic and drop broken (unreadable) images
    path_pics = [f'{path_from + pic}' for pic in all_images_array]
    valid_paths = []
    for path in path_pics:
        try:
            Image.open(path)
            valid_paths.append(path)
        except IOError:
            print(f'{path}: image is broken')

    # create list of valid ids from valid paths list
    valid_ids = [clear_valid_path(x) for x in valid_paths]

    # predict valid paths (top 5 tags per image)
    res = multiple_prediction.predictMultipleImages(valid_paths, result_count_per_image=5)

    # Build one small frame per image and concatenate once at the end; this
    # avoids the deprecated (removed in pandas 2.0) and quadratic pattern of
    # calling DataFrame.append inside a loop.  Also avoids shadowing the
    # builtin `dict`, which the original loop variable did.
    frames = [
        pd.DataFrame(
            {'prediction': prediction['predictions'],
             'percentage': prediction['percentage_probabilities'],
             'twitterusername': user_id}
        )
        for prediction, user_id in zip(res, valid_ids)
    ]
    if frames:
        df = pd.concat(frames)
    else:
        df = pd.DataFrame(columns=['prediction', 'percentage', 'twitterusername'])
    df = df.reset_index(drop=True)
    if csv:
        df.to_csv(path_to)
if __name__ == '__main__':
    # Configuration for a standalone run against dataset A.
    dataset = 'dataset_a'
    # Default/placeholder Twitter profile picture (not referenced below).
    tw_standard_pic = f'../../../../data/{dataset}/twitter/twitter_profilepics/shardproducton.jpg'
    path_from = f'../../../../data/{dataset}/twitter/twitter_profilepics/'
    path_to = 'twitter_background_prediction/'
    # Pre-trained ResNet50 weights; read by get_image_tags via the module-level name.
    path_model = '../../image_pred_models/resnet50_weights_tf_dim_ordering_tf_kernels.h5'
    csv = False  # do not write a CSV by default
    get_image_tags(path_from, path_to, csv)
"christian28bewerbung@gmail.com"
] | christian28bewerbung@gmail.com |
f2b444d8bf9b6798f4553ad51384063997abeeb3 | ee8c4c954b7c1711899b6d2527bdb12b5c79c9be | /assessment2/amazon/run/core/controllers/rot.py | bde30a20fd61ad84406edc6c861f513cad32bc25 | [] | no_license | sqlconsult/byte | 02ac9899aebea4475614969b594bfe2992ffe29a | 548f6cb5038e927b54adca29caf02c981fdcecfc | refs/heads/master | 2021-01-25T14:45:42.120220 | 2018-08-11T23:45:31 | 2018-08-11T23:45:31 | 117,135,069 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 360 | py | #!/usr/bin/env python3
from flask import Blueprint, Flask, render_template, request, url_for
# Blueprint for ROT-cipher related views; all routes are mounted under /rot.
controller = Blueprint('rot', __name__, url_prefix='/rot')

# Commented-out scaffold for a title-lookup route (left as a TODO exercise).
# @controller.route('/<string:title>', methods=['GET'])
# def lookup(title):
#     if title == 'Republic':                          # TODO 2
#         return render_template('republic.html')      # TODO 2
#     else:
#         pass
| [
"sqlconsult@hotmail.com"
] | sqlconsult@hotmail.com |
e4a8db30865c1f641e1659dc8663a83b73f24ba0 | e2468c60810764971f2dae2b959650b553042810 | /32_longParentheses.py | 424c2c33dda1d69ea95600848da000f3d81f0741 | [] | no_license | awesome-liuxiao/leetcodesolution | 9a01b6f36266149ae7fe00625785d1ada41f190a | 3637cd1347b5153daeeb855ebc44cfea5649fc90 | refs/heads/master | 2023-06-08T13:42:14.653688 | 2023-06-01T08:39:35 | 2023-06-01T08:39:35 | 213,380,224 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 825 | py | class Solution:
def longestValidParentheses(self, s: str) -> int:
res = 0
sLen = len(s)
stack = []
start = 0
for i in range(sLen):
if s[i] == '(':
stack.append(i)
else:
if stack == []:
start = i+1
else:
stack.pop()
if stack == []:
res = max(res, i-start+1)
else:
res = max(res, i - stack[len(stack)-1])
# print(res)
return res
# Ad-hoc smoke checks; expected answers are noted in the trailing comments.
x = Solution()
data1 = "(()" # 2
# data2 = ")()())" # 4
data3 = "()(()" # 2
data4 = "(((())))" # 8
# x.longestValidParentheses(data1)
# x.longestValidParentheses(data2)
x.longestValidParentheses(data3)
# x.longestValidParentheses(data4)
| [
"lio4072@hotmail.com"
] | lio4072@hotmail.com |
beb00c8795fb8fcbdafe263d42cb4a3b1821cc54 | 6bf4e54f8ae95582b73bb969ba44069c64e87651 | /kdhi/main_site/migrations/0022_article_update_date.py | 52c02679337d35350bcf54c1168d52433162fa30 | [] | no_license | speedycowenator/kdhi_migration | 4bc983c4656a2a87cb056461bfb4219e38da1a85 | 422b2e3f142a30c81f428fb8eaa813e4a71d56fc | refs/heads/master | 2022-11-14T13:27:51.520697 | 2020-07-02T19:31:12 | 2020-07-02T19:31:12 | 246,138,874 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 392 | py | # Generated by Django 2.2.5 on 2020-03-23 22:22
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an auto-updating `update_date` field to the Article model."""

    dependencies = [
        ('main_site', '0021_auto_20200323_1821'),
    ]

    operations = [
        migrations.AddField(
            model_name='article',
            name='update_date',
            # auto_now=True: the field is refreshed to today's date on every save().
            field=models.DateField(auto_now=True),
        ),
    ]
| [
"54556114+speedycowenator@users.noreply.github.com"
] | 54556114+speedycowenator@users.noreply.github.com |
9d01f2e31bb38d3ccda4e090566d0ce168341b29 | 9b422078f4ae22fe16610f2ebc54b8c7d905ccad | /xlsxwriter/test/comparison/test_optimize06.py | 70a303d77c74404718a1c41d06dcd3a44592ecf3 | [
"BSD-2-Clause-Views"
] | permissive | projectsmahendra/XlsxWriter | 73d8c73ea648a911deea63cb46b9069fb4116b60 | 9b9d6fb283c89af8b6c89ad20f72b8208c2aeb45 | refs/heads/master | 2023-07-21T19:40:41.103336 | 2023-07-08T16:54:37 | 2023-07-08T16:54:37 | 353,636,960 | 0 | 0 | NOASSERTION | 2021-04-01T08:57:21 | 2021-04-01T08:57:20 | null | UTF-8 | Python | false | false | 1,138 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2021, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparison_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.

    """

    def setUp(self):
        self.set_filename('optimize06.xlsx')

    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file."""
        options = {'constant_memory': True, 'in_memory': False}
        workbook = Workbook(self.got_filename, options)
        worksheet = workbook.add_worksheet()

        # Write every single-byte character except chr(34) (the double
        # quote, which Excel doesn't encode as &quot;) to exercise the
        # SharedStrings handling of control and ASCII characters.
        for code_point in (*range(0, 34), *range(35, 128)):
            worksheet.write_string(code_point, 0, chr(code_point))

        workbook.close()

        self.assertExcelEqual()
| [
"jmcnamara@cpan.org"
] | jmcnamara@cpan.org |
af725df8bc8c67caf2efdca7c84665f967cc3fd5 | 463716b1e2dacba48802b3a58272de732c3e3382 | /scripts/earth_capture/OCP_moon_moon_leg.py | b375653c7091984a75bd58a87a6f76a5796cb94b | [] | no_license | TomSemblanet/Asteroid-Retrieval-Mission | e6afa5446ee27268faa8a56d72028d8649a24646 | 9d4b1809e868aec674d6bf3c48958b23418290e7 | refs/heads/main | 2023-06-12T20:45:21.493228 | 2021-07-01T14:03:38 | 2021-07-01T14:03:38 | 348,095,525 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,891 | py | import sys
import pickle
import numpy as np
import matplotlib.pyplot as plt
import cppad_py
from scipy.interpolate import interp1d
from collocation.GL_V.src.problem import Problem
from collocation.GL_V.src.optimization import Optimization
class MoonMoonLeg(Problem):
    """ CR3BP : Moon-Moon Leg optimal control problem.

    Low-thrust transfer tracked against a reference trajectory in the
    circular restricted three-body problem, discretized on a fixed number
    of collocation nodes and solved by the GL_V collocation machinery.
    """

    def __init__(self, cr3bp, mass0, Tmax, trajectory, time):
        """ Initialization of the `MoonMoonLeg` class.

        Args:
            cr3bp: CR3BP model providing characteristic length L, time T and dynamics.
            mass0: initial spacecraft mass [kg].
            Tmax: maximum thrust [kN].
            trajectory: reference states (6 x N array, nondimensional position/velocity).
            time: nondimensional time grid matching `trajectory`.
        """
        # 7 states: position (x, y, z), velocity (vx, vy, vz) and mass.
        n_states = 7
        # 4 controls: thrust magnitude and 3-component thrust direction.
        n_controls = 4
        n_st_path_con = 0
        n_ct_path_con = 1
        n_event_con = 13
        n_f_par = 0
        n_nodes = 200

        Problem.__init__(self, n_states, n_controls, n_st_path_con, n_ct_path_con,
                         n_event_con, n_f_par, n_nodes)

        # Set some attributs
        self.cr3bp = cr3bp
        self.mass0 = mass0  # [kg]
        self.Tmax = Tmax  # [kN]
        self.trajectory = trajectory  # [L] | [L/T]
        self.time = time  # [T]

    def set_constants(self):
        """ Setting of the problem constants.

        Nondimensionalizes thrust, standard gravity and specific impulse
        with the CR3BP characteristic length L and time T.
        """
        self.Tmax /= self.cr3bp.L / self.cr3bp.T**2  # Thrusts dimensioning

        self.g0 = 9.80665e-3 / (self.cr3bp.L / self.cr3bp.T**2)
        self.Isp = 3000 / self.cr3bp.T

    def set_boundaries(self):
        """ Setting of the states, controls, free-parameters, initial and final times
            boundaries """

        # States boundaries
        # X [-]
        self.low_bnd.states[0] = -2
        self.upp_bnd.states[0] = 2

        # Y [-]
        self.low_bnd.states[1] = -2
        self.upp_bnd.states[1] = 2

        # Z [-]
        self.low_bnd.states[2] = -2
        self.upp_bnd.states[2] = 2

        # Vx [-]
        self.low_bnd.states[3] = -10
        self.upp_bnd.states[3] = 10

        # Vy [-]
        self.low_bnd.states[4] = -10
        self.upp_bnd.states[4] = 10

        # Vz [-]
        self.low_bnd.states[5] = -10
        self.upp_bnd.states[5] = 10

        # m [kg] -- mass can only decrease from its initial value
        self.low_bnd.states[6] = 1e-6
        self.upp_bnd.states[6] = self.mass0

        # T [-] -- thrust magnitude, kept strictly positive
        self.low_bnd.controls[0] = 1e-6
        self.upp_bnd.controls[0] = self.Tmax

        # Tx [-]
        self.low_bnd.controls[1] = - 1
        self.upp_bnd.controls[1] = 1

        # Ty [-]
        self.low_bnd.controls[2] = - 1
        self.upp_bnd.controls[2] = 1

        # Tz [-]
        self.low_bnd.controls[3] = - 1
        self.upp_bnd.controls[3] = 1

        # Initial and final times boundaries:
        # initial time is fixed, final time is free within
        # [0.5, 2.5] x (reference final time).
        self.low_bnd.ti = self.upp_bnd.ti = self.time[0]

        self.low_bnd.tf = 0.5 * self.time[-1]
        self.upp_bnd.tf = 2.5 * self.time[-1]

    def event_constraints(self, xi, ui, xf, uf, ti, tf, f_prm):
        """ Computation of the events constraints.

        Pins the initial and final position/velocity to the endpoints of
        the reference trajectory, and the initial mass to `mass0`.
        """
        events = np.ndarray((self.prm['n_event_con'], 1),
                            dtype=cppad_py.a_double)

        x_i, y_i, z_i, vx_i, vy_i, vz_i, m_i = xi
        x_f, y_f, z_f, vx_f, vy_f, vz_f, _ = xf

        # Initial state must match the reference trajectory's first point.
        events[0] = x_i - self.trajectory[0, 0]
        events[1] = y_i - self.trajectory[1, 0]
        events[2] = z_i - self.trajectory[2, 0]
        events[3] = vx_i - self.trajectory[3, 0]
        events[4] = vy_i - self.trajectory[4, 0]
        events[5] = vz_i - self.trajectory[5, 0]

        # Final state must match the reference trajectory's last point.
        events[6] = x_f - self.trajectory[0, -1]
        events[7] = y_f - self.trajectory[1, -1]
        events[8] = z_f - self.trajectory[2, -1]
        events[9] = vx_f - self.trajectory[3, -1]
        events[10] = vy_f - self.trajectory[4, -1]
        events[11] = vz_f - self.trajectory[5, -1]

        # Initial mass is fixed.
        events[12] = m_i - self.mass0

        return events

    def set_events_constraints_boundaries(self):
        """ Setting of the events constraints boundaries.

        All event constraints are equality constraints (lower == upper == 0).
        """
        self.low_bnd.event[0] = self.upp_bnd.event[0] = 0
        self.low_bnd.event[1] = self.upp_bnd.event[1] = 0
        self.low_bnd.event[2] = self.upp_bnd.event[2] = 0
        self.low_bnd.event[3] = self.upp_bnd.event[3] = 0
        self.low_bnd.event[4] = self.upp_bnd.event[4] = 0
        self.low_bnd.event[5] = self.upp_bnd.event[5] = 0
        self.low_bnd.event[6] = self.upp_bnd.event[6] = 0
        self.low_bnd.event[7] = self.upp_bnd.event[7] = 0
        self.low_bnd.event[8] = self.upp_bnd.event[8] = 0
        self.low_bnd.event[9] = self.upp_bnd.event[9] = 0
        self.low_bnd.event[10] = self.upp_bnd.event[10] = 0
        self.low_bnd.event[11] = self.upp_bnd.event[11] = 0
        self.low_bnd.event[12] = self.upp_bnd.event[12] = 0

    def path_constraints(self, states, controls, states_add, controls_add, controls_col, f_par):
        # Single control path constraint: the thrust direction vector
        # (ux, uy, uz) must have unit norm at every node (incl. additional
        # and collocation points).
        st_path = np.ndarray((self.prm['n_st_path_con'],
                              2*self.prm['n_nodes']-1), dtype=cppad_py.a_double)
        ct_path = np.ndarray((self.prm['n_ct_path_con'],
                              4*self.prm['n_nodes']-3), dtype=cppad_py.a_double)

        # Thrust magnitude in x, y and z directions in the synodic frame [-]
        ux = np.concatenate((controls[1], controls_add[1], controls_col[1]))
        uy = np.concatenate((controls[2], controls_add[2], controls_col[2]))
        uz = np.concatenate((controls[3], controls_add[3], controls_col[3]))

        u2 = ux*ux + uy*uy + uz*uz

        ct_path[0] = u2 - 1

        return st_path, ct_path

    def set_path_constraints_boundaries(self):
        """ Setting of the path constraints boundaries """
        # Equality: ||u||^2 - 1 == 0 (unit thrust direction).
        self.low_bnd.ct_path[0] = self.upp_bnd.ct_path[0] = 0

    def dynamics(self, states, controls, f_prm, expl_int=False):
        """ Computation of the states derivatives.

        CR3BP natural dynamics plus thrust acceleration T/m along the unit
        direction (ux, uy, uz); mass flow follows the rocket equation with
        constant Isp.
        """
        if expl_int == False:
            dynamics = np.ndarray(
                (states.shape[0], states.shape[1]), dtype=cppad_py.a_double)
        else:
            dynamics = np.zeros(len(states))

        # Mass [kg]
        m = states[6]

        # Extraction of controls
        T = controls[0]
        ux, uy, uz = controls[1:]

        x_dot, y_dot, z_dot, vx_dot, vy_dot, vz_dot = self.cr3bp.states_derivatives(0, states[:-1])

        dynamics[0] = x_dot
        dynamics[1] = y_dot
        dynamics[2] = z_dot

        dynamics[3] = vx_dot + T / m * ux
        dynamics[4] = vy_dot + T / m * uy
        dynamics[5] = vz_dot + T / m * uz

        dynamics[6] = - T / self.Isp / self.g0

        return dynamics

    def end_point_cost(self, ti, xi, tf, xf, f_prm):
        """ Computation of the end point cost (Mayer term).

        Maximizes the final mass (minimizes propellant consumption) by
        minimizing -m_f / m_0.
        """
        mf = xf[-1]
        return - mf / self.mass0

    def set_initial_guess(self):
        """ Setting of the initial guess for the states, controls, free-parameters
            and time grid """

        # Interpolation of the states
        f_x = interp1d(self.time, self.trajectory[0])
        f_y = interp1d(self.time, self.trajectory[1])
        f_z = interp1d(self.time, self.trajectory[2])
        f_vx = interp1d(self.time, self.trajectory[3])
        f_vy = interp1d(self.time, self.trajectory[4])
        f_vz = interp1d(self.time, self.trajectory[5])

        # Time
        self.initial_guess.time = np.linspace(self.time[0], self.time[-1], self.prm['n_nodes'])

        # States: sample the reference trajectory on the node grid;
        # mass starts constant at mass0.
        self.initial_guess.states = np.ndarray(
            shape=(self.prm['n_states'], self.prm['n_nodes']))
        self.initial_guess.states[0] = f_x(self.initial_guess.time)
        self.initial_guess.states[1] = f_y(self.initial_guess.time)
        self.initial_guess.states[2] = f_z(self.initial_guess.time)
        self.initial_guess.states[3] = f_vx(self.initial_guess.time)
        self.initial_guess.states[4] = f_vy(self.initial_guess.time)
        self.initial_guess.states[5] = f_vz(self.initial_guess.time)
        self.initial_guess.states[6] = self.mass0 * np.ones(self.prm['n_nodes'])

        # Controls
        self.initial_guess.controls = np.ndarray(
            shape=(self.prm['n_controls'], self.prm['n_nodes']))
        # NOTE(review): the ndarray allocated just above is immediately
        # replaced by the zeros array below -- the first assignment appears
        # redundant.
        self.initial_guess.controls = np.zeros((4, self.prm['n_nodes']))
| [
"tomsemblanet@hotmail.fr"
] | tomsemblanet@hotmail.fr |
c609fe49aaa8f7ba0b62b3f030bf3d3de0ce70a8 | 3ead569228d28e173868dc307acb78f3e41947a1 | /greedy/thisisCT_Q04.py | 0ae4ce3ac5ed748b4bb35bc57fb27828fe4a4363 | [] | no_license | pjhq2/Algorithm | 96863d8343fb30fda9fe64b4d0f4abd4a1d7a81b | dd12ed47472f68d3a979d604aa32ca82e1c656b4 | refs/heads/main | 2023-08-11T11:14:10.720161 | 2021-10-07T14:01:58 | 2021-10-07T14:01:58 | 386,916,139 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 255 | py | # 04. 만들 수 없는 금액
N = int(input())
money = list(map(int, input().split()))
money.sort()
# x-1원까지 만들 수 있을 때, x를 만들 수 있는가?
target = 1
for x in money:
if target < x:
break
target += x
print(target) | [
"pkonu7@gmail.com"
] | pkonu7@gmail.com |
97847a90e19953edb59b4fc72b64ea864c749fe6 | 67377e04b769338d6370b20126aa09af26ffee66 | /tests/test_plotting_multiple_keras_histories.py | afc1400e47d9dd91a10fb279393b766308a6e093 | [
"MIT"
] | permissive | LucaCappelletti94/plot_keras_history | a9fe2eee28f8021897a7e937937163d99e1c704d | 1383a30e5659298d749678b75ea1d02f3bd73275 | refs/heads/master | 2023-05-23T18:34:57.090240 | 2022-11-19T12:00:40 | 2022-11-19T12:00:40 | 185,058,056 | 16 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,169 | py | """Test to check if multiple histories plots look ok."""
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
import numpy as np
from plot_keras_history import plot_history
from extra_keras_metrics import get_minimal_multiclass_metrics
import matplotlib.pyplot as plt
import matplotlib
matplotlib.use("Agg")
def test_plotting_keras_history_object():
    """Train five tiny models and plot all their histories into one figure."""
    histories = []
    for _ in range(5):
        # Minimal binary classifier: a single sigmoid unit.
        model = Sequential([
            Dense(1, activation="sigmoid")
        ])
        model.compile(
            optimizer="nadam",
            loss="binary_crossentropy",
            metrics=get_minimal_multiclass_metrics()
        )
        # Synthetic task: label is whether the row mean is positive.
        size = 1000
        X = np.random.uniform(
            low=-1,
            high=+1,
            size=(size, 100)
        )
        y = np.mean(X, axis=1) > 0
        # First half trains, second half validates; full-batch updates.
        histories.append(model.fit(
            X[:size//2], y[:size//2],
            batch_size=size//2,
            validation_data=(X[size//2:], y[size//2:]),
            validation_batch_size=size//2,
            epochs=200,
            verbose=False
        ))
    # Plotting a list of History objects must render a single combined figure.
    plot_history(histories, path="./plots/multiple_histories.png")
| [
"cappelletti.luca94@gmail.com"
] | cappelletti.luca94@gmail.com |
f9a9d3d90bc48ff33ac94a9bdc430acb9e17d70f | a46d135ba8fd7bd40f0b7d7a96c72be446025719 | /packages/python/plotly/plotly/validators/bar/marker/colorbar/_ypad.py | fad1f6fcec651e9cb3a069e1030d2a0ba6a7a075 | [
"MIT"
] | permissive | hugovk/plotly.py | 5e763fe96f225d964c4fcd1dea79dbefa50b4692 | cfad7862594b35965c0e000813bd7805e8494a5b | refs/heads/master | 2022-05-10T12:17:38.797994 | 2021-12-21T03:49:19 | 2021-12-21T03:49:19 | 234,146,634 | 0 | 0 | MIT | 2020-01-15T18:33:43 | 2020-01-15T18:33:41 | null | UTF-8 | Python | false | false | 442 | py | import _plotly_utils.basevalidators
class YpadValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``bar.marker.colorbar.ypad`` attribute."""

    def __init__(self, plotly_name="ypad", parent_name="bar.marker.colorbar", **kwargs):
        # Defaults can be overridden via kwargs; ypad must be non-negative.
        edit_type = kwargs.pop("edit_type", "colorbars")
        minimum = kwargs.pop("min", 0)
        super(YpadValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            min=minimum,
            **kwargs
        )
| [
"noreply@github.com"
] | hugovk.noreply@github.com |
1cb2d663dd85e185e94758e265d09d2776abc213 | 89ee7302d7a6c53a8370315c15c136322766fb66 | /ch13/dframe_def2.py | 6da23eb51ded62a876d48b8ced783873bdc8d8de | [] | no_license | nbvc1003/python | 34424ce577335faf180160a82c1ba59b3233030b | bae2f2a066fbde9107e6a3cd26b82de47e71759e | refs/heads/master | 2020-09-04T20:18:50.102871 | 2020-01-29T06:21:47 | 2020-01-29T06:21:47 | 219,880,055 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 789 | py | import pandas as pd
# dictionary
data = {
"2015": [9904312, 3448737, 2890451, 2466052], # 열은 같은 데이터 타입
"2010": [9631482, 3393191, 2632035, 2431774],
"2005": [9762546, 3512547, 2517680, 2456016],
"2000": [9853972, 3655437, 2466338, 2473990],
"지역": ["수도권", "경상권", "수도권", "경상권"],
"2010-2015 증가율": [0.0283, 0.0163, 0.0982, 0.0141]
}
column = ["지역","2015","2010","2005","2000","2010-2015 증가율"]
index = ["서울","부산","인천","대구"]
df = pd.DataFrame(data, index=index, columns=column)
print(df)
# 인천삭제
df = df.drop('인천')
print(df)
# 서울 대구 삭제
df = df.drop(['서울','대구'])
print(df)
#2010년 삭제
df = df.drop('2010', axis=1)
print(df) | [
"nbvc@nate.com"
] | nbvc@nate.com |
3da28a632e3b588d1f02152ca18f53abab8cb077 | bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d | /lib/third_party/google/auth/compute_engine/credentials.py | 30ffb162bca44fa2bf453997356e51731500b46c | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | google-cloud-sdk-unofficial/google-cloud-sdk | 05fbb473d629195f25887fc5bfaa712f2cbc0a24 | 392abf004b16203030e6efd2f0af24db7c8d669e | refs/heads/master | 2023-08-31T05:40:41.317697 | 2023-08-23T18:23:16 | 2023-08-23T18:23:16 | 335,182,594 | 9 | 2 | NOASSERTION | 2022-10-29T20:49:13 | 2021-02-02T05:47:30 | Python | UTF-8 | Python | false | false | 17,059 | py | # Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Compute Engine credentials.
This module provides authentication for an application running on Google
Compute Engine using the Compute Engine metadata server.
"""
import datetime
import six
from google.auth import _helpers
from google.auth import credentials
from google.auth import exceptions
from google.auth import iam
from google.auth import jwt
from google.auth import metrics
from google.auth.compute_engine import _metadata
from google.oauth2 import _client
class Credentials(credentials.Scoped, credentials.CredentialsWithQuotaProject):
    """Compute Engine Credentials.

    These credentials use the Google Compute Engine metadata server to obtain
    OAuth 2.0 access tokens associated with the instance's service account,
    and are also used for Cloud Run, Flex and App Engine (except for the Python
    2.7 runtime, which is supported only on older versions of this library).

    For more information about Compute Engine authentication, including how
    to configure scopes, see the `Compute Engine authentication
    documentation`_.

    .. note:: On Compute Engine the metadata server ignores requested scopes.
        On Cloud Run, Flex and App Engine the server honours requested scopes.

    .. _Compute Engine authentication documentation:
        https://cloud.google.com/compute/docs/authentication#using
    """

    def __init__(
        self,
        service_account_email="default",
        quota_project_id=None,
        scopes=None,
        default_scopes=None,
    ):
        """
        Args:
            service_account_email (str): The service account email to use, or
                'default'. A Compute Engine instance may have multiple service
                accounts.
            quota_project_id (Optional[str]): The project ID used for quota and
                billing.
            scopes (Optional[Sequence[str]]): The list of scopes for the credentials.
            default_scopes (Optional[Sequence[str]]): Default scopes passed by a
                Google client library. Use 'scopes' for user-defined scopes.
        """
        super(Credentials, self).__init__()
        self._service_account_email = service_account_email
        self._quota_project_id = quota_project_id
        self._scopes = scopes
        self._default_scopes = default_scopes

    def _retrieve_info(self, request):
        """Retrieve information about the service account.

        Updates the scopes and retrieves the full service account email.

        Args:
            request (google.auth.transport.Request): The object used to make
                HTTP requests.
        """
        info = _metadata.get_service_account_info(
            request, service_account=self._service_account_email
        )

        # Resolve the 'default' alias to the concrete account email.
        self._service_account_email = info["email"]

        # Don't override scopes requested by the user.
        if self._scopes is None:
            self._scopes = info["scopes"]

    def _metric_header_for_usage(self):
        # Metrics label: service-account credentials from the metadata server.
        return metrics.CRED_TYPE_SA_MDS

    def refresh(self, request):
        """Refresh the access token and scopes.

        Args:
            request (google.auth.transport.Request): The object used to make
                HTTP requests.

        Raises:
            google.auth.exceptions.RefreshError: If the Compute Engine metadata
                service can't be reached or if the instance has no
                credentials.
        """
        # User-requested scopes take precedence over library defaults.
        scopes = self._scopes if self._scopes is not None else self._default_scopes
        try:
            self._retrieve_info(request)
            self.token, self.expiry = _metadata.get_service_account_token(
                request, service_account=self._service_account_email, scopes=scopes
            )
        except exceptions.TransportError as caught_exc:
            # Surface metadata-server failures as RefreshError while keeping
            # the original exception as the cause (py2/py3-compatible chaining).
            new_exc = exceptions.RefreshError(caught_exc)
            six.raise_from(new_exc, caught_exc)

    @property
    def service_account_email(self):
        """The service account email.

        .. note:: This is not guaranteed to be set until :meth:`refresh` has been
            called.
        """
        return self._service_account_email

    @property
    def requires_scopes(self):
        # Scoping is only required when no scopes were supplied at construction.
        return not self._scopes

    @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
    def with_quota_project(self, quota_project_id):
        # NOTE(review): unlike with_scopes, default_scopes is not carried over
        # here -- confirm against upstream google-auth behavior.
        return self.__class__(
            service_account_email=self._service_account_email,
            quota_project_id=quota_project_id,
            scopes=self._scopes,
        )

    @_helpers.copy_docstring(credentials.Scoped)
    def with_scopes(self, scopes, default_scopes=None):
        # Compute Engine credentials can not be scoped (the metadata service
        # ignores the scopes parameter). App Engine, Cloud Run and Flex support
        # requesting scopes.
        return self.__class__(
            scopes=scopes,
            default_scopes=default_scopes,
            service_account_email=self._service_account_email,
            quota_project_id=self._quota_project_id,
        )
# Default lifetime of the self-signed assertion used to request an ID token.
_DEFAULT_TOKEN_LIFETIME_SECS = 3600  # 1 hour in seconds
# OAuth 2.0 token endpoint used when no explicit token_uri is provided.
_DEFAULT_TOKEN_URI = "https://www.googleapis.com/oauth2/v4/token"
class IDTokenCredentials(
    credentials.CredentialsWithQuotaProject,
    credentials.Signing,
    credentials.CredentialsWithTokenUri,
):
    """Open ID Connect ID Token-based service account credentials.

    These credentials relies on the default service account of a GCE instance.

    ID token can be requested from `GCE metadata server identity endpoint`_, IAM
    token endpoint or other token endpoints you specify. If metadata server
    identity endpoint is not used, the GCE instance must have been started with
    a service account that has access to the IAM Cloud API.

    .. _GCE metadata server identity endpoint:
        https://cloud.google.com/compute/docs/instances/verifying-instance-identity
    """

    def __init__(
        self,
        request,
        target_audience,
        token_uri=None,
        additional_claims=None,
        service_account_email=None,
        signer=None,
        use_metadata_identity_endpoint=False,
        quota_project_id=None,
    ):
        """
        Args:
            request (google.auth.transport.Request): The object used to make
                HTTP requests.
            target_audience (str): The intended audience for these credentials,
                used when requesting the ID Token. The ID Token's ``aud`` claim
                will be set to this string.
            token_uri (str): The OAuth 2.0 Token URI.
            additional_claims (Mapping[str, str]): Any additional claims for
                the JWT assertion used in the authorization grant.
            service_account_email (str): Optional explicit service account to
                use to sign JWT tokens.
                By default, this is the default GCE service account.
            signer (google.auth.crypt.Signer): The signer used to sign JWTs.
                In case the signer is specified, the request argument will be
                ignored.
            use_metadata_identity_endpoint (bool): Whether to use GCE metadata
                identity endpoint. For backward compatibility the default value
                is False. If set to True, ``token_uri``, ``additional_claims``,
                ``service_account_email``, ``signer`` argument should not be set;
                otherwise ValueError will be raised.
            quota_project_id (Optional[str]): The project ID used for quota and
                billing.

        Raises:
            ValueError:
                If ``use_metadata_identity_endpoint`` is set to True, and one of
                ``token_uri``, ``additional_claims``, ``service_account_email``,
                ``signer`` arguments is set.
        """
        super(IDTokenCredentials, self).__init__()

        self._quota_project_id = quota_project_id
        self._use_metadata_identity_endpoint = use_metadata_identity_endpoint
        self._target_audience = target_audience

        if use_metadata_identity_endpoint:
            # The identity endpoint signs the token server-side, so any local
            # signing configuration is contradictory and rejected.
            if token_uri or additional_claims or service_account_email or signer:
                raise exceptions.MalformedError(
                    "If use_metadata_identity_endpoint is set, token_uri, "
                    "additional_claims, service_account_email, signer arguments"
                    " must not be set"
                )
            self._token_uri = None
            self._additional_claims = None
            self._signer = None

        if service_account_email is None:
            # Resolve the instance's default service account email from the
            # metadata server.
            sa_info = _metadata.get_service_account_info(request)
            self._service_account_email = sa_info["email"]
        else:
            self._service_account_email = service_account_email

        if not use_metadata_identity_endpoint:
            if signer is None:
                # Sign JWTs remotely via the IAM API, authenticating with the
                # instance's own Compute Engine credentials.
                signer = iam.Signer(
                    request=request,
                    credentials=Credentials(),
                    service_account_email=self._service_account_email,
                )
            self._signer = signer
            self._token_uri = token_uri or _DEFAULT_TOKEN_URI

            if additional_claims is not None:
                self._additional_claims = additional_claims
            else:
                self._additional_claims = {}

    def with_target_audience(self, target_audience):
        """Create a copy of these credentials with the specified target
        audience.

        Args:
            target_audience (str): The intended audience for these credentials,
                used when requesting the ID Token.

        Returns:
            google.auth.service_account.IDTokenCredentials: A new credentials
                instance.
        """
        # since the signer is already instantiated,
        # the request is not needed
        if self._use_metadata_identity_endpoint:
            return self.__class__(
                None,
                target_audience=target_audience,
                use_metadata_identity_endpoint=True,
                quota_project_id=self._quota_project_id,
            )
        else:
            return self.__class__(
                None,
                service_account_email=self._service_account_email,
                token_uri=self._token_uri,
                target_audience=target_audience,
                additional_claims=self._additional_claims.copy(),
                signer=self.signer,
                use_metadata_identity_endpoint=False,
                quota_project_id=self._quota_project_id,
            )

    @_helpers.copy_docstring(credentials.CredentialsWithQuotaProject)
    def with_quota_project(self, quota_project_id):
        # since the signer is already instantiated,
        # the request is not needed
        if self._use_metadata_identity_endpoint:
            return self.__class__(
                None,
                target_audience=self._target_audience,
                use_metadata_identity_endpoint=True,
                quota_project_id=quota_project_id,
            )
        else:
            return self.__class__(
                None,
                service_account_email=self._service_account_email,
                token_uri=self._token_uri,
                target_audience=self._target_audience,
                additional_claims=self._additional_claims.copy(),
                signer=self.signer,
                use_metadata_identity_endpoint=False,
                quota_project_id=quota_project_id,
            )

    @_helpers.copy_docstring(credentials.CredentialsWithTokenUri)
    def with_token_uri(self, token_uri):
        # since the signer is already instantiated,
        # the request is not needed
        if self._use_metadata_identity_endpoint:
            raise exceptions.MalformedError(
                "If use_metadata_identity_endpoint is set, token_uri" " must not be set"
            )
        else:
            return self.__class__(
                None,
                service_account_email=self._service_account_email,
                token_uri=token_uri,
                target_audience=self._target_audience,
                additional_claims=self._additional_claims.copy(),
                signer=self.signer,
                use_metadata_identity_endpoint=False,
                quota_project_id=self.quota_project_id,
            )

    def _make_authorization_grant_assertion(self):
        """Create the OAuth 2.0 assertion.

        This assertion is used during the OAuth 2.0 grant to acquire an
        ID token.

        Returns:
            bytes: The authorization grant assertion.
        """
        now = _helpers.utcnow()
        lifetime = datetime.timedelta(seconds=_DEFAULT_TOKEN_LIFETIME_SECS)
        expiry = now + lifetime

        payload = {
            "iat": _helpers.datetime_to_secs(now),
            "exp": _helpers.datetime_to_secs(expiry),
            # The issuer must be the service account email.
            "iss": self.service_account_email,
            # The audience must be the auth token endpoint's URI
            "aud": self._token_uri,
            # The target audience specifies which service the ID token is
            # intended for.
            "target_audience": self._target_audience,
        }

        payload.update(self._additional_claims)

        token = jwt.encode(self._signer, payload)

        return token

    def _call_metadata_identity_endpoint(self, request):
        """Request ID token from metadata identity endpoint.

        Args:
            request (google.auth.transport.Request): The object used to make
                HTTP requests.

        Returns:
            Tuple[str, datetime.datetime]: The ID token and the expiry of the ID token.

        Raises:
            google.auth.exceptions.RefreshError: If the Compute Engine metadata
                service can't be reached or if the instance has no credentials.
            ValueError: If extracting expiry from the obtained ID token fails.
        """
        try:
            path = "instance/service-accounts/default/identity"
            params = {"audience": self._target_audience, "format": "full"}
            metrics_header = {
                metrics.API_CLIENT_HEADER: metrics.token_request_id_token_mds()
            }
            id_token = _metadata.get(
                request, path, params=params, headers=metrics_header
            )
        except exceptions.TransportError as caught_exc:
            new_exc = exceptions.RefreshError(caught_exc)
            six.raise_from(new_exc, caught_exc)

        # The expiry is taken (unverified) from the token's own ``exp`` claim.
        _, payload, _, _ = jwt._unverified_decode(id_token)
        return id_token, datetime.datetime.fromtimestamp(payload["exp"])

    def refresh(self, request):
        """Refreshes the ID token.

        Args:
            request (google.auth.transport.Request): The object used to make
                HTTP requests.

        Raises:
            google.auth.exceptions.RefreshError: If the credentials could
                not be refreshed.
            ValueError: If extracting expiry from the obtained ID token fails.
        """
        if self._use_metadata_identity_endpoint:
            self.token, self.expiry = self._call_metadata_identity_endpoint(request)
        else:
            assertion = self._make_authorization_grant_assertion()
            access_token, expiry, _ = _client.id_token_jwt_grant(
                request, self._token_uri, assertion
            )
            self.token = access_token
            self.expiry = expiry

    @property  # type: ignore
    @_helpers.copy_docstring(credentials.Signing)
    def signer(self):
        return self._signer

    def sign_bytes(self, message):
        """Signs the given message.

        Args:
            message (bytes): The message to sign.

        Returns:
            bytes: The message's cryptographic signature.

        Raises:
            ValueError:
                Signer is not available if metadata identity endpoint is used.
        """
        if self._use_metadata_identity_endpoint:
            raise exceptions.InvalidOperation(
                "Signer is not available if metadata identity endpoint is used"
            )
        return self._signer.sign(message)

    @property
    def service_account_email(self):
        """The service account email."""
        return self._service_account_email

    @property
    def signer_email(self):
        # Alias required by the Signing interface.
        return self._service_account_email
| [
"cloudsdk.mirror@gmail.com"
] | cloudsdk.mirror@gmail.com |
76582253feea01bffeb2c528387633c0af0ee6aa | 8acffb8c4ddca5bfef910e58d3faa0e4de83fce8 | /ml-flask/Lib/site-packages/pandas/tests/util/test_assert_index_equal.py | d49e5ff9c4a7acb872d67258a15a3bba1db6dc40 | [
"MIT"
] | permissive | YaminiHP/SimilitudeApp | 8cbde52caec3c19d5fa73508fc005f38f79b8418 | 005c59894d8788c97be16ec420c0a43aaec99b80 | refs/heads/master | 2023-06-27T00:03:00.404080 | 2021-07-25T17:51:27 | 2021-07-25T17:51:27 | 389,390,951 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:a7a930c51ee077a3cf9db15abd6751d1d7e0fc22968c380545f80e6c3f58a838
size 5664
| [
"yamprakash130@gmail.com"
] | yamprakash130@gmail.com |
bf3b8438d8ee625611afccaa3eb3e39f83b7f91c | 272ae95716e530d538937ded59ec5b6e0b6d4db8 | /섹션 4/10. 역수열/AA.py | ac30788d08d5d305fed5b5beddba53a6d2fd1329 | [] | no_license | gogoheejun/algorithm | 83a1cb30bff5c349f53be16764e517a46e99cf1c | 39e999abf7170f434a7ac6e1f698f066e55aca03 | refs/heads/main | 2023-06-22T13:06:32.135917 | 2021-07-25T15:46:19 | 2021-07-25T15:46:19 | 383,379,782 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 336 | py | import sys
# sys.stdin = open("input.txt", "r")
n = int(input())
a = list(map(int, input().split()))
a.insert(0, 0)
seq = [0]*n
for i in range(1, n):
for j in range(n):
if a[i] == 0 and seq[j] == 0:
seq[j] = i
break
elif seq[j] == 0:
a[i] -= 1
for x in seq:
print(x, end=" ")
| [
"heejjuunn@gmail.com"
] | heejjuunn@gmail.com |
9bc46ea84932af7397f0c23c585801421a479073 | 3e23aaf1d482843e3640dc2721ab887082063b51 | /num201_300/num291_300/num300.py | 5981af0f9bb0efe30350182a50370d4ece08e4b8 | [] | no_license | guozhaoxin/leetcode | b19be28c0dc82fa7a5126edafa7c77ae2c77f22e | 807ba32ed7802b756e93dfe44264dac5bb9317a0 | refs/heads/master | 2020-04-01T10:29:49.375239 | 2019-02-24T03:27:18 | 2019-02-24T03:27:18 | 153,120,115 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,887 | py | #encoding:utf8
__author__ = 'gold'
'''
300. Longest Increasing Subsequence
Given an unsorted array of integers, find the length of longest increasing subsequence.
Example:
Input: [10,9,2,5,3,7,101,18]
Output: 4
Explanation: The longest increasing subsequence is [2,3,7,101], therefore the length is 4.
Note:
There may be more than one LIS combination, it is only necessary for you to return the length.
Your algorithm should run in O(n2) complexity.
Follow up: Could you improve it to O(n log n) time complexity?
Accepted
167,626
Submissions
424,735
'''
class Solution:
    def lengthOfLIS(self, nums):
        """Quadratic DP: best[i] is the length of the longest strictly
        increasing subsequence that ends at index i.

        :type nums: List[int]
        :rtype: int
        """
        if len(nums) <= 1:
            return len(nums)
        best = []
        for i, current in enumerate(nums):
            # Lengths of all subsequences this element can extend.
            extendable = [best[j] for j in range(i) if nums[j] < current]
            best.append((max(extendable) if extendable else 0) + 1)
        return max(best)
class Solution(object):
    def lengthOfLIS(self, nums):
        """Length of the longest strictly increasing subsequence, O(n log n).

        Scans nums right-to-left while keeping `compare` strictly decreasing;
        the final length of `compare` equals the LIS length.

        :type nums: List[int]
        :rtype: int
        """
        if len(nums) < 2: return len(nums)
        compare = []
        for i in range(len(nums) - 1, -1, -1):
            if not compare:
                compare.append(nums[i])
            else:
                if nums[i] < compare[-1]:
                    compare.append(nums[i])
                elif nums[i] == compare[-1]:
                    continue
                else:
                    # Replace the leftmost element that is <= nums[i].
                    pos = self.find(compare, 0, len(compare) - 1, nums[i])
                    compare[pos] = nums[i]
        # BUG FIX: removed a leftover debug `print(compare)` that polluted
        # stdout on every call.
        return len(compare)

    def find(self, compare, start, end, num):
        """Binary search in the strictly decreasing list `compare` for the
        leftmost index whose element is <= num."""
        if compare[start] <= num: return start
        if compare[end] >= num: return end
        if start + 1 == end or start == end:
            return end
        m = (start + end) // 2
        if num == compare[m]:
            return m
        elif num < compare[m]:
            return self.find(compare, m, end, num)
        else:
            return self.find(compare, start, m, num)
class Solution(object):
    def lengthOfLIS(self, nums):
        """Length of the longest strictly increasing subsequence in O(n log n).

        Walks the array from right to left, maintaining `stack` as a strictly
        decreasing sequence; its final length is the answer.

        :type nums: List[int]
        :rtype: int
        """
        if len(nums) <= 1:
            return len(nums)
        stack = [nums[-1]]  # seed with the last element so the loop never sees an empty stack
        for value in reversed(nums[:-1]):
            if value < stack[-1]:
                stack.append(value)
            elif value > stack[-1]:
                # Overwrite the leftmost entry that is <= value.
                stack[self._slot(stack, value)] = value
            # equal to the tail: nothing to do
        return len(stack)

    def _slot(self, stack, value):
        """Return the leftmost index of the strictly decreasing `stack` whose
        element is <= value (iterative binary search)."""
        if stack[0] <= value:
            return 0
        lo, hi = 0, len(stack) - 1
        while lo + 1 < hi:
            mid = (lo + hi) // 2
            if stack[mid] == value:
                return mid
            if value < stack[mid]:
                lo = mid
            else:
                hi = mid
        return hi
if __name__ == '__main__':
    # Smoke test: the LIS of this sample is [2, 3, 7, 101], so it prints 4.
    print(Solution().lengthOfLIS([10,9,2,5,3,7,101,18]))
"1345616978@qq.com"
] | 1345616978@qq.com |
06dd454f3cc6627b6107b4bad371fd543ba0df59 | ef32b87973a8dc08ba46bf03c5601548675de649 | /pytglib/api/types/option_value_boolean.py | c8645e9adc81b47b314846938b95c884dc4e4c7b | [
"MIT"
] | permissive | iTeam-co/pytglib | 1a7580f0e0c9e317fbb0de1d3259c8c4cb90e721 | d3b52d7c74ee5d82f4c3e15e4aa8c9caa007b4b5 | refs/heads/master | 2022-07-26T09:17:08.622398 | 2022-07-14T11:24:22 | 2022-07-14T11:24:22 | 178,060,880 | 10 | 9 | null | null | null | null | UTF-8 | Python | false | false | 595 | py |
from ..utils import Object
class OptionValueBoolean(Object):
    """
    Represents a boolean option

    Attributes:
        ID (:obj:`str`): ``OptionValueBoolean``

    Args:
        value (:obj:`bool`):
            The value of the option

    Returns:
        OptionValue

    Raises:
        :class:`telegram.Error`
    """
    ID = "optionValueBoolean"

    def __init__(self, value, **kwargs):
        # Raw boolean payload of the option; extra kwargs are ignored.
        self.value = value

    @staticmethod
    def read(q: dict, *args) -> "OptionValueBoolean":
        # Deserialize from a TDLib response dict; missing key yields None.
        return OptionValueBoolean(q.get('value'))
| [
"me@amirh.co"
] | me@amirh.co |
6f3889fd9f1d8115b28c041894d80197faa8958e | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03284/s932938186.py | f9974e8e57e1fe2b23ce35c3383ef130a046f77b | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 125 | py | import sys
import sys


def solve(n, k):
    """Return 1 if n is not divisible by k, else 0."""
    return 1 if n % k != 0 else 0


def main():
    # Single input line: "N K".
    n, k = [int(x) for x in sys.stdin.readline().split()]
    print(solve(n, k))


if __name__ == "__main__":
    # Guarding the I/O lets the module be imported (and solve() unit-tested)
    # without consuming stdin; script behavior is unchanged.
    main()
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
f42c817db89647881254bc96d1a90ddcfc7de826 | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /logs_write_1/retention-policy_delete.py | 4ce4469c1a441eb5b15a8dad07b0d26f4ffd024f | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 948 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Builds the option dict for `aws logs delete-retention-policy` and hands it
# to the shared single-parameter command helper.
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_one_parameter

# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/logs/delete-retention-policy.html
if __name__ == '__main__':
    """
    put-retention-policy : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/logs/put-retention-policy.html
    """
    parameter_display_string = """
    # log-group-name : The name of the log group.
    """
    add_option_dict = {}
    #######################################################################
    # parameter display string
    add_option_dict["parameter_display_string"] = parameter_display_string
    # ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
    write_one_parameter("logs", "delete-retention-policy", "log-group-name", add_option_dict)
| [
"hcseo77@gmail.com"
] | hcseo77@gmail.com |
50681b82fb23f55dca02a53dd784ad540e419c21 | baed2c2da1f776c0968d3cacd2fa45bdbe5482d6 | /S4cam/groupedCameras/TMP/legacy_designs/TMP_baseline_rev_multicam_test4_circular_elliptical_stop_leaders_8_39/elliptical_stop/polarization_analysis/plot_polarization_histograms.py | 63e6df9875b18b829138fcd042bd9ea942941e2f | [] | no_license | patogallardo/zemax_tools | 5ae2fe9a1e8b032684b8cf57457ee4f3239d9141 | 90d309c2f96c94469963eb905844d76fa2137bf9 | refs/heads/master | 2023-01-08T22:52:16.865852 | 2022-12-20T21:36:28 | 2022-12-20T21:36:28 | 234,634,525 | 7 | 3 | null | null | null | null | UTF-8 | Python | false | false | 501 | py | '''Opens results from distorted mirrors and plots histograms of polarization
leakage'''
import matplotlib.pyplot as plt
import numpy as np
import os
fname1 = os.path.abspath("./crosspol/crosspol_output.npz")
fname2 = os.path.abspath("../gravitational_thermal_deformations/polarization/crosspol/crosspol_output.npz") # noqa
# "T_db" — cross-polar leakage values per run (presumably in dB, per the key
# name and module docstring — TODO confirm against the producer script).
pols1 = np.load(fname1)["T_db"]
pols2 = np.load(fname2)["T_db"]
# Unfilled step histograms keep both distributions readable when overlaid.
plt.hist(pols1, histtype='step')
plt.hist(pols2, histtype='step')
plt.savefig("polarization_histograms.pdf")
| [
"26889221+patogallardo@users.noreply.github.com"
] | 26889221+patogallardo@users.noreply.github.com |
a783a9e19c4613852480150686a548ef7fc28889 | 1690d6ec0451a5971fea1aded40f0f98e81ad89b | /ch05/25def2.py | 54f9bbf1a832610161c831c925367353d67f07b5 | [] | no_license | eggeggss/BigData | 09165e5935c25b0c5f343f457768c13ee4d7e9a5 | 0306ff01861c7dcde6dec99df58991411535036c | refs/heads/master | 2020-04-04T18:15:25.124491 | 2018-11-05T09:33:48 | 2018-11-05T09:33:48 | 156,156,338 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 118 | py | #!/usr/bin/env
def fun2(num):
    """Demo function: prints a fixed label followed by str(num)."""
    label = "this is function2="
    print(label + str(num))
# Earlier invocation examples (positional / keyword), kept for reference:
#fun2(100)
#fun2(num=200)
fun2(num=None)  # prints "this is function2=None"
| [
"eggeggss@gmail.com"
] | eggeggss@gmail.com |
de60fcbc238e290b46b71f00839306c79306ef20 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03221/s313909172.py | 7053691761a5635cbd5e58b85e5ee589303cc652 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 539 | py | from sys import stdin, setrecursionlimit
from bisect import bisect_left
def main():
    # Buffered byte-level reads: fast for large inputs.
    input = stdin.buffer.readline
    n, m = map(int, input().split())
    # Each record: (group id p in 1..n, value y), kept in input order.
    py = [list(map(int, input().split())) for _ in range(m)]
    summary = [[] for _ in range(n)]
    for p, y in py:
        summary[p - 1].append(y)
    # Sort each group's values so bisect_left gives a 1-based rank per group.
    for i in range(n):
        summary[i].sort()
    for p, y in py:
        # Output: 6-digit zero-padded group id immediately followed by the
        # 6-digit zero-padded rank of y inside its group.
        print(str(p).zfill(6), str(bisect_left(summary[p - 1], y) + 1).zfill(6), sep='')
if __name__ == "__main__":
    # Recursion limit bump is precautionary; main() itself is fully iterative.
    setrecursionlimit(10000)
    main()
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
09709c91a5aaf7cf1de4c830b0e3f15e21759830 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_82/50.py | 0c6605af2e896c47bfa05692c350399f1ff042a0 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,259 | py | #! /usr/bin/python
import sys
def check_dist(nb_vendor, vendor, min_dist):
    """Return 1 when every pair (i < j < nb_vendor) of positions is at least
    min_dist apart, else 0 (C-style boolean for the simulation loop)."""
    violations = (
        vendor[j] - vendor[i] < min_dist
        for i in range(nb_vendor)
        for j in range(i + 1, nb_vendor)
    )
    return 0 if any(violations) else 1
def solve(nb_vendor, vendor, min_dist):
    """Simulate vendors drifting apart in half-unit steps until every pair is
    at least min_dist apart; returns the total simulated time (0.5 per step).

    Mutates `vendor` in place. Assumes `vendor` is sorted ascending — TODO
    confirm against the input format.
    """
    time = 0  # NOTE: shadows the stdlib module name `time` (not imported here)
    while check_dist(nb_vendor, vendor, min_dist) == 0:
        # one stop is a half-meter
        time += 0.5
        move = []
        # The leftmost vendor always drifts left.
        move.append(-0.5)
        for i in xrange(1, nb_vendor-1):
            diff = vendor[i] - vendor[i-1]
            if diff > min_dist:
                # Plenty of room on the left: drift left.
                move.append(-0.5)
            elif diff == min_dist:
                # Exactly spaced: copy the left neighbour's move to keep the gap.
                move.append(move[i-1])
            else:
                # Too close to the left neighbour: move right.
                move.append(0.5)
        # The rightmost vendor always drifts right.
        move.append(0.5)
        for i in xrange(nb_vendor):
            vendor[i] += move[i]
        # print vendor
    return time
# Python 2 driver: reads the per-case input file named on the command line.
fd = open(sys.argv[1])
num_cases = int(fd.readline())
for i in range(0, num_cases):
    # Case header: "<number of distinct positions> <minimum distance>".
    line = fd.readline().split(" ")
    nb_pts = int(line[0])
    min_dist = int(line[1])
    nb_vendor = 0
    vendor = []
    for n in xrange(nb_pts):
        # Each line: "<position> <vendor count at that position>".
        line = fd.readline().split(" ")
        pos = int(line[0])
        num_vendor = int(line[1])
        nb_vendor += num_vendor
        # Expand "k vendors at position p" into k individual entries.
        for v in xrange(num_vendor):
            vendor.append(pos)
    # print vendor
    output = solve(nb_vendor, vendor, min_dist)
    print "Case #%d:" % (i+1), output
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
357f58315058e592739a421382e856391d2e1f97 | 7b15c40c00ba2008024979d0e520a922bc2f8229 | /2nd_try/0529_Minesweeper.py | 2bde189cdd44d12473720857df8d975c5259ad35 | [] | no_license | axd8911/Leetcode | aa9875a5b55c7d5e961d9a3ea55823d06eb08a88 | 1c6cab14f4dac4f3f29f1b5ce13bb5289724fdb4 | refs/heads/master | 2022-07-07T12:59:38.251218 | 2021-06-22T06:27:05 | 2021-06-22T06:27:05 | 173,857,144 | 0 | 1 | null | 2022-06-22T01:22:30 | 2019-03-05T02:23:42 | Python | UTF-8 | Python | false | false | 1,044 | py | class Solution:
    def updateBoard(self, board: List[List[str]], click: List[int]) -> List[List[str]]:
        """Minesweeper reveal: 'M' -> 'X' on a mine hit; otherwise BFS from the
        click, writing adjacent-mine counts and flood-filling blank ('B')
        regions. Mutates `board` in place and returns it."""
        a,b = click
        if board[a][b] == 'M':
            # Clicked a mine: mark game over and stop.
            board[a][b] = 'X'
            return board
        h,w = len(board),len(board[0])
        # 8-neighbourhood offsets.
        directions = [(1,0),(-1,0),(0,1),(0,-1),(1,1),(-1,1),(1,-1),(-1,-1)]
        visited = {(a,b)}
        queue = collections.deque([(a,b)])
        while queue:
            x,y = queue.popleft()
            num = 0
            # Count mines adjacent to (x, y).
            for dx,dy in directions:
                newX,newY = x+dx,y+dy
                if 0<=newX<h and 0<=newY<w and board[newX][newY]=='M':
                    num += 1
            if num>0:
                board[x][y] = str(num)
            else:
                # No adjacent mines: mark blank and expand to every neighbour.
                board[x][y] = 'B'
                for dx,dy in directions:
                    newX,newY = x+dx,y+dy
                    if 0<=newX<h and 0<=newY<w and (newX,newY) not in visited:
                        visited.add((newX,newY))
                        queue.append((newX,newY))
        return board
| [
"axd8911@hotmail.com"
] | axd8911@hotmail.com |
ed92641ba38ecc72946b4bca0f14b403d0ef8d9f | 8e6b59cf324c87de3d726a585f0f053cf129c5ed | /experiments/netowrks/testAllSimilarNets.py | ab77fb9fbc177a26f19637c3405a62135f147144 | [] | no_license | thodorisGeorgiou/comparison_of_hand_crafted_and_deep_learning_on_CFD_output | 4556d38a7a12384c0c2c7e780924bec584814272 | 80721c8af0eb48b0b9b8b1a5d5cccd97cc19304f | refs/heads/main | 2023-03-13T08:50:38.069662 | 2021-03-03T13:58:24 | 2021-03-03T13:58:24 | 344,140,621 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,222 | py | #Run test network scripts on repetative runs, in parallel
import os
import sys
from multiprocessing import Pool
# Earlier model-type sweeps, kept for reference:
# modelTypes = ["op", "cc"]
# modelTypes = ["vc", "ds", "op", "cc"]
modelTypes = ["vc"]
numRuns = 4
# CLI: <basePath ending in '/'> <model type>; note this runs at import time.
basePath = sys.argv[1]
mType = sys.argv[2]
if basePath[-1] != "/":
    exit("Path must end with a slash")
# gpu = sys.argv[1]
# releaseDirs = ["vc/1/","vc/2/","vc/3/","vc/4/"]
def runTest(gpu):
    """Run testNetworks.py for run index gpu+1, pinned to GPU `gpu`.

    Prints the path and returns early when the release directory is missing.
    Uses the module-level `basePath` and `mType` parsed from the CLI.
    """
    release_dir = basePath + str(gpu + 1) + "Release/"
    if not os.path.isdir(release_dir):
        print(release_dir)
        return
    command = 'CUDA_VISIBLE_DEVICES=' + str(gpu) + ' python3 testNetworks.py ' + release_dir + " " + mType
    os.system(command)
# Fan out one worker per GPU/run index (0..3); each maps to <basePath><i+1>Release/.
runs = [i for i in range(4)]
p = Pool(4)
res = p.map(runTest, runs)
p.close()
p.join()
# for mType in modelTypes:
# for run in range(numRuns):
# # relDir = basePath+mType+"/"+str(run+1)+"/"
# relDir = basePath+str(run+1)+"Release/"
# if not os.path.isdir(relDir):
# print(relDir)
# continue
# os.system('CUDA_VISIBLE_DEVICES='+gpu+' python3 testNetworks.py '+relDir+" "+mType)
# # os.system('python3 testNetworks.py '+relDir+" "+mType) | [
"thodorisgeorgiou65@gmail.com"
] | thodorisgeorgiou65@gmail.com |
08d21fe53aa88f46cbec0f4f7d961b705b9a3937 | f2658c4bd7f833ace25ac2b63e88317b05f4602d | /2017 November/code/VendorDataImporter/rdf/common_tool/command_utils.py | 804d006cd090f84d8dcd954aac0dc2b63d6de4d2 | [] | no_license | xiaochao00/telanav_diary | e4c34ac0a14b65e4930e32012cc2202ff4ed91e2 | 3c583695e2880322483f526c98217c04286af9b2 | refs/heads/master | 2022-01-06T19:42:55.504845 | 2019-05-17T03:11:46 | 2019-05-17T03:11:46 | 108,958,763 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,987 | py | from common_utils import print_error, print_standout
import os, sys
def execute_cmd(cmd):
    """Run *cmd* through the shell; log and abort the whole process with
    exit code -1 when the command returns a non-zero status."""
    print_standout("execute cmd is :%s" % cmd)
    status = os.system(cmd)
    if status != 0:
        print_error("execute cmd[%s] failed" % cmd)
        sys.exit(-1)
def parse_size_info_response_lines(response_lines):
    """
    :param response_lines:
        the response_lines of command 'df -m directory'
        Filesystem 1M-blocks Used Available Use% Mounted on
    :return: response dict
        {'Filesystem': , 'TotalSize': , 'Used': , 'Available': , 'UsedRate': , 'MountedOn': }
        numeric size fields are floats scaled by 1024; None on any parse failure
    """
    if not response_lines:
        print_error('parse the response line of command failed. response lines can not none ')
        return None
    # Locate the 'Filesystem ... Available ...' header line.
    # BUG FIX: the original incremented an index on every line, so its
    # "not found" guard (-1 / len) could never trigger and a missing header
    # led to indexing past the list or parsing an arbitrary line.
    header_index = None
    for idx, line in enumerate(response_lines):
        if line.find("Filesystem") != -1 and line.find("Available") != -1:
            header_index = idx
            break
    if header_index is None or header_index + 1 >= len(response_lines):
        print_error("in parse the response line of df command. lines is %s ." % response_lines)
        return None
    names = ['Filesystem', 'TotalSize', 'Used', 'Available', 'UsedRate', 'MountedOn']
    # The data row immediately follows the header line.
    values = response_lines[header_index + 1].strip().split()
    if len(names) != len(values):
        print_error("parse command response line failed. lines : %s" % response_lines)
        # BUG FIX: previously fell through after logging and could crash on
        # a short `values` list; now fails explicitly.
        return None
    response_dict = dict(zip(names, values))
    # change unit to B
    # NOTE(review): with `df -m` input this factor yields KB, not bytes —
    # preserved as-is so existing callers keep seeing the same numbers.
    response_dict["TotalSize"] = float(response_dict["TotalSize"]) * 1024
    response_dict["Used"] = float(response_dict["Used"]) * 1024
    response_dict["Available"] = float(response_dict["Available"]) * 1024
    return response_dict
| [
"1363180272@qq.com"
] | 1363180272@qq.com |
5214f839a0865e8185396f000fb2bfbf469fc0ee | e0dcc1e7c862823278a93c40bfe263bbd196944b | /setup.py | 5a6ad4f2c70e2f58b778427d4f65c0d973e82cd3 | [
"BSD-2-Clause",
"BSD-3-Clause"
] | permissive | baiqj/ToughRADIUS | 3e9300e3f65264d2e43bdf9f3475077de7d4491e | 382b80d2c38ad2fba2a848776d959da9701e002b | refs/heads/master | 2021-01-24T03:48:05.517408 | 2015-03-02T07:00:22 | 2015-03-02T07:00:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,857 | py | #!/usr/bin/python
from setuptools import setup, find_packages
import toughradius

# Single-source the release version from the package itself.
version = toughradius.__version__

# Runtime dependencies (mostly minimum versions, a few exact pins).
install_requires = [
    'argparse',
    'MySQL-python>=1.2.5',
    'Mako>=0.9.0',
    'Beaker>=1.6.4',
    'MarkupSafe>=0.18',
    'PyYAML>=3.10',
    'SQLAlchemy>=0.9.8',
    'Twisted>=13.0.0',
    'autobahn>=0.9.3-3',
    'bottle>=0.12.7',
    'six>=1.8.0',
    'tablib>=0.10.0',
    'zope.interface>=4.1.1',
    'pycrypto==2.6.1',
    'sh==1.11',
    'pyOpenSSL==0.14',
    'service_identity',
]
# NOTE: currently unused alternative dependency list.
install_requires_empty = []

# Non-Python assets (templates, static files, RADIUS dictionaries) shipped
# inside the package.
package_data={
    'toughradius': [
        'console/admin/views/*',
        'console/customer/views/*',
        'console/static/css/*',
        'console/static/fonts/*',
        'console/static/img/*',
        'console/static/js/*',
        'console/static/favicon.ico',
        'radiusd/dicts/*'
    ]
}

setup(name='toughradius',
      version=version,
      author='jamiesun',
      author_email='jamiesun.net@gmail.com',
      url='https://github.com/talkincode/ToughRADIUS',
      license='BSD',
      description='RADIUS Server',
      long_description=open('README.rst').read(),
      classifiers=[
          'Development Status :: 6 - Mature',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: BSD License',
          'Programming Language :: Python :: 2.6',
          'Programming Language :: Python :: 2.7',
          'Topic :: Software Development :: Libraries :: Python Modules',
          'Topic :: System :: Systems Administration :: Authentication/Directory',
      ],
      packages=find_packages(),
      package_data=package_data,
      keywords=['radius', 'authentication'],
      zip_safe=True,
      include_package_data=True,
      install_requires=install_requires,
      scripts=['bin/toughctl'],
      tests_require='nose>=0.10.0b1',
      test_suite='nose.collector',
      )
"jamiesun.net@gmail.com"
] | jamiesun.net@gmail.com |
9b319c036fd8d33a105fc098ecc2c5fbeec64da4 | 596e92d0d484b6e7eee6d322e72e52748fdeaa5d | /test/test_nfl_scores_team_season.py | e7165e08a90d72cf9112826ce8a69d29d3d5003c | [] | no_license | scottypate/sportsdata | f5f61ddc7eb482883f93737c6ce73dd814ed4336 | a07955ab50bf4fff1ce114ed9895095ff770c473 | refs/heads/main | 2023-08-18T16:51:56.452678 | 2021-10-22T12:44:08 | 2021-10-22T12:44:08 | 420,062,350 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 954 | py | # coding: utf-8
"""
NFL v3 Scores
NFL schedules, scores, odds, weather, and news API. # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import sportsdata.nfl_scores
from sportsdata.nfl_scores.models.nfl_scores_team_season import NflScoresTeamSeason # noqa: E501
from sportsdata.nfl_scores.rest import ApiException
class TestNflScoresTeamSeason(unittest.TestCase):
    """NflScoresTeamSeason unit test stubs (generated by swagger-codegen)."""

    def setUp(self):
        # No fixtures needed yet.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testNflScoresTeamSeason(self):
        """Test NflScoresTeamSeason"""
        # FIXME: construct object with mandatory attributes with example values
        # model = sportsdata.nfl_scores.models.nfl_scores_team_season.NflScoresTeamSeason()  # noqa: E501
        pass
if __name__ == '__main__':
    # Allow running this stub module directly with the unittest runner.
    unittest.main()
| [
"scotty.pate@auth0.com"
] | scotty.pate@auth0.com |
50e5d4ca95a51c397925e4fce092f972e845b93a | c744f3ae44ab1a692b4b6a39ce2c3045c81406c4 | /app/api_0_1/resources/config_yongjin.py | 27c50713b382bc9acd2b21172fe19037c2afa7e1 | [] | no_license | qq529952515/OA | f1e56d37c8e4b35b6f2e9bbdd0fb8370b90cc64d | ccf17b17b3122f9650bb1ab939befad784e9b4e0 | refs/heads/master | 2023-05-07T22:16:04.130146 | 2020-05-17T07:20:59 | 2020-05-17T07:20:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75,444 | py | from flask import request
from flask_restful.reqparse import RequestParser
from flask_restful import Resource, marshal_with, fields
from app.api_0_1.common.utils import make_response, make_marshal_fields, convert_pagination, \
make_response_from_pagination
from app.models import db
from app.models.common.utils import paginate
from app.models.entertainment_city import EntertainmentCity
from app.models.member import Member
from app.models.config_yongjin import ConfigYongjin, YongJinTyb
import json
from sqlalchemy import and_
from app.api_0_1.common import DEFAULT_PAGE, DEFAULT_PAGE_SIZE
from app.models.member_yongjin_compute import MemberYongjinCompute
from sqlalchemy import func
from flask import current_app
'''
系统设置 - 佣金
'''
# Return each entertainment city and its associated game types (返回娱乐城和对应的游戏类型)
''' Example of the per-city commission-rate payload, keyed by game-type code:
    AG:{"1002":0.9,"1003":0.8,"1004":0.7},
    BB:{"1002":0.4,"1003":0.3}'''
class YlcAndGametype(Resource):
    """Endpoint returning the blank commission template for the UI.

    The response contains one entry per entertainment city; each city maps
    every one of its game-type codes to ``None`` so the front end can fill
    in commission rates, plus empty top-level tier fields.
    """
    def get(self):
        rows = db.session.query(
            EntertainmentCity.code,
            EntertainmentCity.game_types).all()
        # Build {city_code: {game_type: None, ...}} — empty dict when the
        # city has no game types configured.
        commission_map = {}
        for row in rows:
            raw_types = row.game_types
            if raw_types is None or raw_types == "":
                commission_map[row.code] = {}
            else:
                commission_map[row.code] = {gt: None for gt in raw_types.split(",")}
        payload = {
            "tuiyongbi": commission_map,
            'pcJine': None,
            'yxhuiyuan': None,
            'youhui': None,
            'fanshui': None,
        }
        return make_response(payload)
# Commission-configuration list endpoint (佣金设定列表)
class ConfigYongjinAPI(Resource):
    """List existing commission configurations and create new ones."""
    @marshal_with(make_marshal_fields({
        'id': fields.Integer,
        'name': fields.String,
        'enable': fields.Integer,
        'zdtze': fields.Float,
        'cksxfsx': fields.Float,
        'qksxfsx': fields.Float,
        'dbcksxf': fields.Float,
        'dbqksxf': fields.Float,
        'zdckje': fields.Float,
        'dls': fields.Integer}))
    def get(self):
        """Return a page of commission configurations.

        ``dls`` is the number of non-member accounts (agents) attached to
        each configuration.
        """
        query = db.session.query(
            ConfigYongjin.id,
            ConfigYongjin.name,
            ConfigYongjin.enable,
            ConfigYongjin.zdtze,
            ConfigYongjin.dbcksxf,
            ConfigYongjin.dbqksxf,
            ConfigYongjin.cksxfsx,
            ConfigYongjin.qksxfsx,
            ConfigYongjin.zdckje)
        pagination = paginate(query)
        # Agent count per configuration, computed over ALL config ids.
        # NOTE(review): `res` is indexed over all ids but zipped against a
        # single page of `pagination.items`; the counts only line up if the
        # page is the first one and both queries share ordering — confirm.
        query1 = db.session.query(ConfigYongjin.id).all()
        res = []
        result = []
        for id in query1:
            a = id[0]
            count = db.session.query(Member).filter(and_(Member.commissionConfig == a, Member.type != 0)).count()
            res.append(count)
        for i in range(len(pagination.items)):
            item = dict(zip(pagination.items[i].keys(), pagination.items[i]))
            item['dls'] = res[i]
            result.append(item)
        return make_response(result)
    # Create a new commission configuration (新增佣金设定)
    def post(self):
        """Create a ConfigYongjin row plus its YongJinTyb tier rows.

        Expects a JSON body of the form ``{"data": {...}}``; ``fendang`` is
        the list of commission tiers. Required-field violations inside the
        tier loop are raised and converted into a generic error response.
        NOTE(review): ``x is not ''`` compares identity with a literal
        (SyntaxWarning on modern CPython); the intent is ``!= ''``.
        """
        data = request.get_json()
        data = data['data']
        configyongjin = ConfigYongjin()
        try:
            if 'enable' in data:
                if data['enable'] is not '':
                    configyongjin.enable = data['enable']
            if 'name' in data:
                if data['name'] is not '':
                    configyongjin.name = data['name']
                else:
                    return {"errorMsg": "标题不能为空"}
            if 'zdtze' in data:
                if data['zdtze'] is not '':
                    configyongjin.zdtze = data['zdtze']
                else:
                    return {"errorMsg": "最低投注额不能为空"}
            if 'dbcksxf' in data:
                if data['dbcksxf'] is not '':
                    configyongjin.dbcksxf = data['dbcksxf']
                else:
                    return {"errorMsg": "单笔存款手续费不能为空"}
            if 'dbqksxf' in data:
                if data['dbqksxf'] is not '':
                    configyongjin.dbqksxf = data['dbqksxf']
                else:
                    return {"errorMsg": "单笔取款手续费不能为空"}
            if 'cksxfsx' in data:
                if data['cksxfsx'] is '':
                    configyongjin.cksxfsx = None
                else:
                    configyongjin.cksxfsx = data['cksxfsx']
            if 'qksxfsx' in data:
                if data['qksxfsx'] is '':
                    configyongjin.qksxfsx = None
                else:
                    configyongjin.qksxfsx = data['qksxfsx']
            if 'zdckje' in data:
                if data['zdckje'] is '':
                    configyongjin.zdckje = None
                else:
                    configyongjin.zdckje = data['zdckje']
            # NOTE(review): if 'fendang' is missing from the payload the
            # loop below raises NameError, which the outer except converts
            # into the generic failure response — presumably intentional.
            if 'fendang' in data:
                fendang = data['fendang']
                if not fendang:
                    return {"errorMsg": "派彩,有效会员,退佣比,优惠,返水是必填项"}
            db.session.add(configyongjin)
            # db.session.commit()
            # flush() assigns configyongjin.id so tier rows can reference it
            # before the final commit.
            db.session.flush()
            for i in fendang:
                tyb = YongJinTyb()
                tyb.Yid = configyongjin.id
                if i['pcJine'] is not None:
                    tyb.pcJine = i['pcJine']
                else:
                    raise Exception("派彩金额不能为空")
                if i['yxhuiyuan'] is not None:
                    tyb.yxhuiyuan = i['yxhuiyuan']
                else:
                    raise Exception("有效会员不能为空")
                if i['youhui'] is not None:
                    tyb.youhui = i['youhui']
                else:
                    raise Exception("优惠不能为空")
                if i['fanshui'] is not None:
                    tyb.fanshui = i['fanshui']
                else:
                    raise Exception("返水不能为空")
                if i['tuiyongbi'] is not None:
                    j = json.dumps(i['tuiyongbi'])
                    tyb.tuiyongbi = j
                db.session.add(tyb)
            db.session.commit()
        except Exception as e:
            # Any validation or DB failure rolls the whole insert back and
            # returns one generic message (the original exception text is
            # discarded).
            db.session.rollback()
            db.session.remove()
            # if configyongjin.id:
            # db.session.delete(configyongjin)
            # try:
            # db.session.commit()
            # except:
            # return {'success': False, 'errorMsg': '添加失败'}
            return {'success': False, "errorMsg":'派彩,有效会员,退佣比,优惠,返水是必填项'}
        return {'success': True, 'errorMsg': '添加成功'}
# Commission-configuration detail endpoint (获取详情页)
class YongJinContent(Resource):
    """Detail endpoint for a single commission configuration.

    GET returns the configuration plus its tier rows (``fendang`` as a JSON
    string), PUT updates it, DELETE removes it together with all of its
    tier rows when no agent still references it.
    """
    @marshal_with(make_marshal_fields({
        'id': fields.Integer,
        'enable': fields.Integer,
        'name': fields.String,
        'zdtze': fields.Float,
        'dbcksxf': fields.Float,
        'cksxfsx': fields.Float,
        'dbqksxf': fields.Float,
        'qksxfsx': fields.Float,
        'zdckje': fields.Float,
        'dls': fields.Integer,
        'fendang': fields.String
    }))
    def get(self, id):
        """Return the configuration with id ``id`` and its tier list."""
        data = {}
        # Number of agents (non-member accounts) using this configuration.
        count = db.session.query(Member).filter(and_(Member.commissionConfig == id, Member.type != 0)).count()
        query = db.session.query(
            ConfigYongjin.id,
            ConfigYongjin.cksxfsx,
            ConfigYongjin.qksxfsx,
            ConfigYongjin.enable,
            ConfigYongjin.zdtze,
            ConfigYongjin.dbcksxf,
            ConfigYongjin.dbqksxf,
            ConfigYongjin.name,
            ConfigYongjin.zdckje).filter(ConfigYongjin.id == id)
        for a in query:
            data['id'] = id
            data['enable'] = a.enable
            data['zdtze'] = a.zdtze
            data['dbqksxf'] = a.dbqksxf
            data['dbcksxf'] = a.dbcksxf
            data['zdckje'] = a.zdckje
            data['dls'] = count
            data['cksxfsx'] = a.cksxfsx
            data['qksxfsx'] = a.qksxfsx
            data['name'] = a.name
        query1 = db.session.query(
            YongJinTyb.id,
            YongJinTyb.pcJine,
            YongJinTyb.youhui,
            YongJinTyb.yxhuiyuan,
            YongJinTyb.fanshui,
            YongJinTyb.tuiyongbi).filter(YongJinTyb.Yid == id).all()
        data['fendang'] = []
        for i in query1:
            a = {}
            a['id'] = i[0]
            a['pcJine'] = i[1]
            a['youhui'] = i[2]
            a['yxhuiyuan'] = i[3]
            a['fanshui'] = i[4]
            a['tuiyongbi'] = i[5]
            data['fendang'].append(a)
        # The marshal field for 'fendang' is a String, so serialize the list.
        data['fendang'] = json.dumps((data['fendang']))
        return make_response(data)
    def put(self, id):
        """Update the configuration, delete/modify/add tier rows.

        Body shape: ``{"data": {...config fields..., "delete": [tyb ids],
        "fendang": [tier dicts]}}``. Tier dicts carrying an ``id`` are
        updated in place; those without are inserted.
        """
        data = request.get_json()
        data = data['data']
        configyongjin = ConfigYongjin.query.get(id)
        try:
            if data['name']:
                configyongjin.name = data['name']
            if data['zdtze']:
                configyongjin.zdtze = data['zdtze']
            if data['dbcksxf']:
                configyongjin.dbcksxf = data['dbcksxf']
            if data['cksxfsx']:
                configyongjin.cksxfsx = data['cksxfsx']
            if data['dbqksxf']:
                configyongjin.dbqksxf = data['dbqksxf']
            if data['qksxfsx']:
                configyongjin.qksxfsx = data['qksxfsx']
            if data['zdckje']:
                configyongjin.zdckje = data['zdckje']
            try:
                db.session.add(configyongjin)
            except Exception:
                db.session.rollback()
                db.session.remove()
            # Remove tier rows explicitly listed for deletion.
            if 'delete' in data:
                num = data['delete']
                try:
                    for dd in num:
                        yjtyb = YongJinTyb.query.filter(YongJinTyb.id == dd).first()
                        db.session.delete(yjtyb)
                except Exception:
                    db.session.rollback()
                    db.session.remove()
            new = data['fendang']
            for i in new:
                if 'id' in i:
                    # Existing tier: update only the truthy fields.
                    # (Note: the loop variable shadows the route param `id`.)
                    id = i['id']
                    yjtyb = YongJinTyb.query.get(id)
                    if i['pcJine']:
                        yjtyb.pcJine = i['pcJine']
                    if i['yxhuiyuan']:
                        yjtyb.yxhuiyuan = i['yxhuiyuan']
                    # Fixed: guard the youhui update on i['youhui'], not
                    # i['pcJine'] (copy/paste bug in the original).
                    if i['youhui']:
                        yjtyb.youhui = i['youhui']
                    if i['fanshui']:
                        yjtyb.fanshui = i['fanshui']
                    if i['tuiyongbi']:
                        yjtyb.tuiyongbi = json.dumps(i['tuiyongbi'])
                    try:
                        db.session.add(yjtyb)
                    except Exception:
                        db.session.rollback()
                        db.session.remove()
                if "id" not in i:
                    # New tier: all rate fields are mandatory.
                    try:
                        tyb = YongJinTyb()
                        tyb.Yid = configyongjin.id
                        if i['pcJine'] is not None:
                            tyb.pcJine = i['pcJine']
                        else:
                            return {"errorMsg": "派彩金额不能为空"}
                        if i['yxhuiyuan'] is not None:
                            tyb.yxhuiyuan = i['yxhuiyuan']
                        else:
                            return {"errorMsg": "有效会员不能为空"}
                        if i['youhui'] is not None:
                            tyb.youhui = i['youhui']
                        else:
                            return {"errorMsg": "优惠不能为空"}
                        if i['fanshui'] is not None:
                            tyb.fanshui = i['fanshui']
                        else:
                            return {"errorMsg": "返水不能为空"}
                        if i['tuiyongbi'] is not None:
                            j = json.dumps(i['tuiyongbi'])
                            tyb.tuiyongbi = j
                        db.session.add(tyb)
                    except Exception:
                        db.session.rollback()
                        db.session.remove()
            db.session.commit()
        except Exception:
            db.session.rollback()
            db.session.remove()
        return {'success': True, 'messages': '修改成功'}
    def delete(self, id):
        """Delete the configuration and ALL of its tier rows.

        Refused when any agent (non-member account) still references it.
        """
        count = db.session.query(Member).filter(and_(Member.commissionConfig == id, Member.type != 0)).count()
        if count >= 1:
            return {'success': False, 'errorMsg': '此设定有代理不可删除'}
        else:
            enen = db.session.query(ConfigYongjin).get(id)
            db.session.delete(enen)
            db.session.commit()
            # Fixed: the original fetched only .first(), leaving any
            # remaining tier rows orphaned; delete every matching row.
            tyb_rows = db.session.query(YongJinTyb).filter(YongJinTyb.Yid == id).all()
            for tyb in tyb_rows:
                db.session.delete(tyb)
            if tyb_rows:
                db.session.commit()
            return {'success': True}
class ChangeStatus(Resource):
    """Toggle the ``enable`` flag of a commission configuration."""
    def put(self):
        """Update ConfigYongjin fields given by ``id`` and ``enable``.

        Returns ``{'success': False, ...}`` when the update or commit
        fails (the original silently swallowed the error and still
        reported success).
        """
        parser = RequestParser()
        parser.add_argument('id', type=int)
        parser.add_argument('enable', type=int)
        args = parser.parse_args(strict=True)
        # Drop absent arguments so only supplied columns are updated.
        args = {key: value for key, value in args.items() if value is not None}
        try:
            ConfigYongjin.query.filter(ConfigYongjin.id == args["id"]).update(args)
            db.session.commit()
        except Exception:
            db.session.rollback()
            db.session.remove()
            # Fixed: report the failure instead of claiming success.
            return {'success': False, 'errorMsg': '修改失败'}
        return {'success': True, 'messages': '修改成功'}
'''佣金计算'''
import time
import datetime
import copy
from app.models.member_fanshui_pc import MemberFanshuiPc
from app.models.blast_bets import BlastBets, BlastBetsCredit
from app.models.member_account_change import MemberAccountChangeRecord, Deposit, Withdrawal
from app.models.entertainment_city_bets_detail import EntertainmentCityBetsDetail
from sqlalchemy import and_, or_
from app.models.member_yongjin_compute import MemberYongjinCompute, MemberAgentDetail, MemberAgentExport
from sqlalchemy import func
from flask import current_app
import decimal
class DecimalEncoder(json.JSONEncoder):
    """JSON encoder that serializes ``decimal.Decimal`` values as floats.

    Used to make SQLAlchemy aggregate results (which come back as
    Decimal) round-trippable through ``json.dumps``/``json.loads``.
    """
    def default(self, o):
        if isinstance(o, decimal.Decimal):
            return float(o)
        # Fixed: the original dropped the return value of the base-class
        # call. json.JSONEncoder.default raises TypeError for types it
        # cannot handle, so propagate (or return) its result explicitly.
        return super(DecimalEncoder, self).default(o)
'''代理佣金计算'''
class AgentYongjinCompute(Resource):
    """Batch job: compute per-member and per-agent commission for a date range.

    Writes one MemberYongjinCompute row per member/product and one
    MemberAgentDetail row per agent, all tagged with a fresh ``recordId``.
    """
    def get(self):
        """Run the commission computation for [startTime, endTime] (inclusive).

        Query params: ``startTime``/``endTime`` as ``YYYY-MM-DD`` strings.
        Returns the list of per-agent summary dicts.
        """
        parser = RequestParser(trim=True)
        parser.add_argument('startTime', type=str)
        parser.add_argument('endTime', type=str)
        args = parser.parse_args(strict=True)
        startTime = request.args.get('startTime')
        endTime = request.args.get('endTime')
        # Extend the end date by one day so the end day itself is included
        # in the (start_time, end_time) epoch range.
        date_list = time.strptime(endTime, "%Y-%m-%d")
        y, m, d = date_list[:3]
        delta = datetime.timedelta(days=1)
        date_result = datetime.datetime(y, m, d) + delta
        end_date = date_result.strftime("%Y-%m-%d")
        start_time = int(time.mktime(time.strptime(startTime, '%Y-%m-%d')))
        end_time = int(time.mktime(time.strptime(end_date, '%Y-%m-%d')))
        # Allocate the next recordId (max existing + 1; 1 on empty table).
        try:
            res_recordId = db.session.query(MemberYongjinCompute.recordId).filter().order_by(
                MemberYongjinCompute.recordId.desc()).first()
            recordId = res_recordId[0] + 1
        except:
            db.session.rollback()
            recordId = 1
        # Total bet amount per member over the range (normal + credit bets).
        result_bets_betamount = db.session.query(
            BlastBets.uid,
            BlastBets.username,
            func.sum(BlastBets.mode * BlastBets.beiShu * BlastBets.actionNum).label("betAmount"),
        ).filter(BlastBets.state == 2, BlastBets.actionTime.between(start_time, end_time)) \
            .group_by(BlastBets.uid, BlastBets.username).all()
        result_credit_betamount = db.session.query(
            BlastBetsCredit.memberId,
            BlastBetsCredit.memberUsername,
            func.sum(BlastBetsCredit.betAmount)
        ).filter(BlastBetsCredit.state == 2, BlastBetsCredit.betTime.between(start_time, end_time)) \
            .group_by(BlastBetsCredit.memberId, BlastBetsCredit.memberUsername).all()
        # Round-trip through JSON (with DecimalEncoder) to turn the Decimal
        # aggregates into plain floats in mutable lists.
        data_bets = json.dumps(result_bets_betamount, cls=DecimalEncoder)
        data_bets = json.loads(data_bets)
        data_credit = json.dumps(result_credit_betamount, cls=DecimalEncoder)
        data_credit = json.loads(data_credit)
        # Merge credit bets into normal bets keyed by member id.
        dict1 = {a[0]: a for a in data_bets}
        for b in data_credit:
            if b[0] in dict1:
                dict1[b[0]][2] += b[2]
            else:
                dict1[b[0]] = b
        data = list(dict1.values())
        data_betAmount = copy.deepcopy(data)
        # Total winnings (bonus) per member over the range.
        result_bets_bonus = db.session.query(
            BlastBets.uid,
            BlastBets.username,
            func.sum(BlastBets.bonus),
        ).filter(BlastBets.state == 2, BlastBets.actionTime.between(start_time, end_time)) \
            .group_by(BlastBets.uid, BlastBets.username).all()
        result_credit_bonus = db.session.query(
            BlastBetsCredit.memberId,
            BlastBetsCredit.memberUsername,
            func.sum(BlastBetsCredit.bonus)
        ).filter(BlastBetsCredit.state == 2, BlastBetsCredit.betTime.between(start_time, end_time)) \
            .group_by(BlastBetsCredit.memberId, BlastBetsCredit.memberUsername).all()
        # Same Decimal-to-float round trip for the bonus aggregates.
        data_bets_bonus = json.dumps(result_bets_bonus, cls=DecimalEncoder)
        data_bets_bonus = json.loads(data_bets_bonus)
        data_credit_bonus = json.dumps(result_credit_bonus, cls=DecimalEncoder)
        data_credit_bonus = json.loads(data_credit_bonus)
        dict2 = {a[0]: a for a in data_bets_bonus}
        for b in data_credit_bonus:
            if b[0] in dict2:
                dict2[b[0]][2] += b[2]
            else:
                dict2[b[0]] = b
        data_bonus = list(dict2.values())
        # KK profit/loss per member = bets - winnings.
        dict3 = {a[0]: a for a in data}
        for b in data_bonus:
            if b[0] in dict3:
                dict3[b[0]][2] -= b[2]
            else:
                dict3[b[0]] = b
        data_sunyi = list(dict3.values())
        data_sunyi_copy = copy.deepcopy(data_sunyi)
        # Fold in entertainment-city profit/loss per member.
        result_EC_betAmount = db.session.query(
            EntertainmentCityBetsDetail.PlayerName,
            func.sum(EntertainmentCityBetsDetail.Profit)
        ).filter(EntertainmentCityBetsDetail.BetTime.between(start_time, end_time)) \
            .group_by(EntertainmentCityBetsDetail.PlayerName).all()
        result_EC_betAmount_list = []
        for res in result_EC_betAmount:
            result_EC_list = []
            reb = db.session.query(Member.id).filter(Member.username == res[0]).first()
            result_EC_list.append(reb[0])
            result_EC_list.append(res[0])
            result_EC_list.append(res[1])
            result_EC_betAmount_list.append(result_EC_list)
        dict_4 = {a[0]: a for a in data_sunyi}
        for b in result_EC_betAmount_list:
            if b[0] in dict_4:
                dict_4[b[0]][2] -= b[2]
            else:
                dict_4[b[0]] = b
        data_total_sunyi = list(dict_4.values())
        # Collect per-agent totals (total member P/L and member head count)
        # over the range; agent ids are deduplicated in a set.
        agent_list = set()
        for i in data_total_sunyi:
            try:
                reb = db.session.query(Member.parent).filter(Member.id == i[0], Member.isTsetPLay != 1).first()
                agent_list.add(reb[0])
            except:
                db.session.rollback()
                continue
        # list_a entries: [agent id, summed member P/L, member count].
        list_a = []
        for i_id in agent_list:
            list_b = []
            sunyi = 0
            n = 0
            for i in data_total_sunyi:
                try:
                    reb = db.session.query(Member.parent).filter(Member.id == i[0], Member.isTsetPLay != 1).first()
                    if reb[0]:
                        if reb[0] == i_id:
                            n += 1
                            sunyi += i[2]
                except:
                    db.session.rollback()
                    continue
            list_b.append(i_id)
            list_b.append(sunyi)
            list_b.append(n)
            list_a.append(list_b)
        # Members who did not play KK but had deposits, withdrawals or
        # promotional credits in the range still get a (zeroed) record.
        # Promotional-credit account-change types per member.
        uid_list = []
        for i in data_sunyi_copy:
            uid_list.append(i[0])
        res_discounts = db.session.query(
            MemberAccountChangeRecord.memberId
        ).filter(or_(MemberAccountChangeRecord.accountChangeType == 121,
                     MemberAccountChangeRecord.accountChangeType == 100010,
                     MemberAccountChangeRecord.accountChangeType == 100011,
                     MemberAccountChangeRecord.accountChangeType == 900006,
                     MemberAccountChangeRecord.accountChangeType == 900007),
                 MemberAccountChangeRecord.time.between(start_time, end_time)).all()
        # Deposits in the range.
        res_deposits = db.session.query(Deposit.memberId).filter(
            Deposit.status == 2, Deposit.isAcdemen == 1,
            Deposit.auditTime.between(start_time, end_time)).all()
        # Withdrawals in the range.
        res_withdrawals = db.session.query(Withdrawal.memberId).filter(
            Withdrawal.status == 2, Withdrawal.isAcdemen == 1,
            Withdrawal.auditTime.between(start_time, end_time)).all()
        uid_set = set()
        for i in res_discounts:
            uid_set.add(i[0])
        for i in res_deposits:
            uid_set.add(i[0])
        for i in res_withdrawals:
            uid_set.add(i[0])
        for i in uid_set:
            if i not in uid_list:
                # Find the member's agent and its commission configuration.
                parent_id = db.session.query(Member.parent, Member.username).filter(Member.id == i,
                                                                                    Member.isTsetPLay != 1).first()
                reb = db.session.query(Member.commissionConfig).filter(Member.id == parent_id[0]).first()
                is_yongjin = db.session.query(ConfigYongjin.enable).filter(ConfigYongjin.id == reb[0]).first()
                if is_yongjin[0] == 1:
                    discounts = 0
                    for parent_list in list_a:
                        # reb = db.session.query(Member.commissionConfig).filter(Member.id == parent_list[0]).first()
                        res_yongjin = db.session.query(YongJinTyb).filter(YongJinTyb.Yid == reb[0]).order_by(
                            YongJinTyb.pcJine.desc()).all()
                        if reb:
                            if parent_list[0] == parent_id[0]:
                                for yj in res_yongjin:
                                    # Weighted promo credit for this member.
                                    res_discounts = db.session.query(
                                        MemberAccountChangeRecord.memberId,
                                        func.sum(MemberAccountChangeRecord.amount)
                                    ).filter(or_(MemberAccountChangeRecord.accountChangeType == 121,
                                                 MemberAccountChangeRecord.accountChangeType == 100010,
                                                 MemberAccountChangeRecord.accountChangeType == 100011,
                                                 MemberAccountChangeRecord.accountChangeType == 900006,
                                                 MemberAccountChangeRecord.accountChangeType == 900007),
                                             MemberAccountChangeRecord.memberId == i,
                                             MemberAccountChangeRecord.time.between(start_time, end_time)).first()
                                    if res_discounts[0]:
                                        discounts = round(res_discounts[1] * yj.youhui / 100, 2)
                                # Deposits total for this member.
                                res_deposits = db.session.query(Deposit.memberId, func.sum(Deposit.applicationAmount)).filter(
                                    Deposit.memberId == i, Deposit.status == 2, Deposit.isAcdemen == 1,
                                    Deposit.auditTime.between(start_time, end_time)).first()
                                if res_deposits[0]:
                                    deposits = res_deposits[1]
                                else:
                                    deposits = 0
                                # Withdrawals total for this member.
                                res_withdrawals = db.session.query(Withdrawal.memberId,
                                                                   func.sum(Withdrawal.withdrawalAmount)).filter(
                                    Withdrawal.memberId == i, Withdrawal.status == 2, Withdrawal.isAcdemen == 1,
                                    Withdrawal.auditTime.between(start_time, end_time)).first()
                                if res_withdrawals[0]:
                                    withdrawals = res_withdrawals[1]
                                else:
                                    withdrawals = 0
                                actionTime = int(time.time())
                                # Record with zero bets/P&L — only money movement.
                                member_yongjin = MemberYongjinCompute()
                                member_yongjin.recordId = recordId
                                member_yongjin.uid = i
                                member_yongjin.username = parent_id[1]
                                member_yongjin.discounts = discounts
                                member_yongjin.actionTime = actionTime
                                member_yongjin.startTime = startTime
                                member_yongjin.endTime = endTime
                                member_yongjin.type = 1
                                member_yongjin.ec_name = 'KK'
                                member_yongjin.childType = 1001
                                member_yongjin.deposits = deposits
                                member_yongjin.withdrawals = withdrawals
                                member_yongjin.parentId = parent_id[0]
                                member_yongjin.sunyi = 0
                                member_yongjin.amount = 0
                                member_yongjin.betAmount = 0
                                try:
                                    db.session.add(member_yongjin)
                                    db.session.commit()
                                except:
                                    db.session.rollback()
                                    db.session.remove()
        # Per-member KK commission: look up each member's agent and the
        # agent's commission-configuration tiers.
        for i in data_sunyi_copy:
            # Find the member's agent.
            parent_id = db.session.query(Member.parent).filter(Member.id == i[0], Member.isTsetPLay != 1).first()
            if parent_id:
                reb = db.session.query(Member.commissionConfig).filter(Member.id == parent_id[0]).first()
                is_yongjin = db.session.query(ConfigYongjin.enable).filter(ConfigYongjin.id == reb[0]).first()
                if is_yongjin[0] == 1:
                    for parent_list in list_a:
                        # reb = db.session.query(Member.commissionConfig).filter(Member.id == parent_list[0]).first()
                        res_yongjin = db.session.query(YongJinTyb).filter(YongJinTyb.Yid == reb[0]).order_by(
                            YongJinTyb.pcJine.desc()).all()
                        if reb:
                            if parent_list[0] == parent_id[0]:
                                amount = 0
                                discounts = 0
                                fanshui = 0
                                # Pick the highest tier the agent qualifies for;
                                # the for-else falls back to the LOWEST tier.
                                # NOTE(review): the else branch still reads `yj`
                                # (loop variable) — NameError if res_yongjin is
                                # empty; confirm tiers are always present.
                                for yj in res_yongjin:
                                    if parent_list[1] >= yj.pcJine and parent_list[2] >= yj.yxhuiyuan:
                                        tyb = json.loads(yj.tuiyongbi)
                                        if tyb['kk']['1001'] == None:
                                            tyb = 0
                                        else:
                                            tyb = tyb['kk']['1001'] / 100
                                        amount = round(i[2] * tyb, 2)  # commission
                                        # Promo credits for this member in range.
                                        res_discounts = db.session.query(
                                            func.sum(MemberAccountChangeRecord.amount)
                                        ).filter(or_(MemberAccountChangeRecord.accountChangeType == 121,
                                                     MemberAccountChangeRecord.accountChangeType == 100010,
                                                     MemberAccountChangeRecord.accountChangeType == 100011,
                                                     MemberAccountChangeRecord.accountChangeType == 900006,
                                                     MemberAccountChangeRecord.accountChangeType == 900007),
                                                 MemberAccountChangeRecord.memberId == i[0],
                                                 MemberAccountChangeRecord.time.between(start_time, end_time)).first()
                                        if res_discounts[0]:
                                            discounts = round(res_discounts[0] * yj.youhui / 100, 2)
                                        # Batch rebate (批次返水).
                                        res_fanshui = db.session.query(func.sum(MemberFanshuiPc.amount)).filter(
                                            MemberFanshuiPc.uid == i[0],
                                            MemberFanshuiPc.fanshuiTime.between(startTime, endTime),
                                            MemberFanshuiPc.ec_name == 'KK',
                                            MemberFanshuiPc.childType == '1001',
                                        ).first()
                                        if res_fanshui[0]:
                                            fanshui = round(res_fanshui[0] * yj.fanshui / 100, 2)
                                        break
                                else:
                                    tyb = json.loads(res_yongjin[-1].tuiyongbi)
                                    if tyb['kk']['1001'] == None:
                                        tyb = 0
                                    else:
                                        tyb = tyb['kk']['1001'] / 100
                                    amount = round(i[2] * tyb, 2)  # commission
                                    # NOTE(review): this fallback omits types
                                    # 900006/900007 from the promo query,
                                    # unlike the matched-tier branch — confirm.
                                    res_discounts = db.session.query(
                                        func.sum(MemberAccountChangeRecord.amount)
                                    ).filter(or_(MemberAccountChangeRecord.accountChangeType == 121,
                                                 MemberAccountChangeRecord.accountChangeType == 100010,
                                                 MemberAccountChangeRecord.accountChangeType == 100011),
                                             MemberAccountChangeRecord.memberId == i[0],
                                             MemberAccountChangeRecord.time.between(start_time, end_time)).first()
                                    if res_discounts[0]:
                                        discounts = round(res_discounts[0] * yj.youhui / 100, 2)
                                    # Batch rebate (批次返水).
                                    res_fanshui = db.session.query(func.sum(MemberFanshuiPc.amount)).filter(
                                        MemberFanshuiPc.uid == i[0],
                                        MemberFanshuiPc.fanshuiTime.between(startTime, endTime),
                                        MemberFanshuiPc.ec_name == 'KK',
                                        MemberFanshuiPc.childType == '1001'
                                    ).first()
                                    if res_fanshui[0]:
                                        fanshui = round(res_fanshui[0] * yj.fanshui / 100, 2)
                                sunyi = round(i[2], 2)  # profit/loss
                                # Real-time rebate (时时返水, account-change type 2).
                                res_ss_fanshui = db.session.query(
                                    func.sum(MemberAccountChangeRecord.amount)).filter(
                                    MemberAccountChangeRecord.memberId == i[0],
                                    MemberAccountChangeRecord.accountChangeType == 2,
                                    MemberAccountChangeRecord.time.between(start_time, end_time)).first()
                                if res_ss_fanshui[0]:
                                    ss_fanshui = res_ss_fanshui[0]
                                else:
                                    ss_fanshui = 0
                                # Deposits total.
                                res_deposits = db.session.query(func.sum(Deposit.applicationAmount)).filter(
                                    Deposit.memberId == i[0], Deposit.status == 2, Deposit.isAcdemen == 1,
                                    Deposit.auditTime.between(start_time, end_time)).first()
                                if res_deposits[0]:
                                    deposits = res_deposits[0]
                                else:
                                    deposits = 0
                                # Withdrawals total.
                                res_withdrawals = db.session.query(func.sum(Withdrawal.withdrawalAmount)).filter(
                                    Withdrawal.memberId == i[0], Withdrawal.status == 2, Withdrawal.isAcdemen == 1,
                                    Withdrawal.auditTime.between(start_time, end_time)).first()
                                if res_withdrawals[0]:
                                    withdrawals = res_withdrawals[0]
                                else:
                                    withdrawals = 0
                                # Bet amount from the deep-copied merged list.
                                betAmount = 0
                                for bet in data_betAmount:
                                    if bet[0] == i[0]:
                                        betAmount = bet[2]
                                # current_app.logger.info("%s会员%s的损益%s,优惠%s,存款%s,取款%s,投注金额%s" % (
                                #     '2019-07-18', i[0], amount, discounts, deposits, withdrawals, betAmount))
                                actionTime = int(time.time())
                                member_yongjin = MemberYongjinCompute()
                                member_yongjin.recordId = recordId
                                member_yongjin.uid = i[0]
                                member_yongjin.username = i[1]
                                member_yongjin.sunyi = sunyi
                                member_yongjin.amount = amount
                                member_yongjin.discounts = discounts
                                member_yongjin.betAmount = betAmount
                                member_yongjin.actionTime = actionTime
                                member_yongjin.startTime = startTime
                                member_yongjin.endTime = endTime
                                member_yongjin.fanshui = fanshui
                                member_yongjin.ss_fanshui = ss_fanshui
                                member_yongjin.type = 1
                                member_yongjin.ec_name = 'KK'
                                member_yongjin.childType = 1001
                                member_yongjin.deposits = deposits
                                member_yongjin.withdrawals = withdrawals
                                member_yongjin.parentId = parent_id[0]
                                try:
                                    db.session.add(member_yongjin)
                                    db.session.commit()
                                except:
                                    db.session.rollback()
                                    db.session.remove()
        # Entertainment-city rows: one MemberYongjinCompute per member /
        # city / game type, commission rate looked up in the tier JSON.
        new_result_EC_betAmount = db.session.query(
            EntertainmentCityBetsDetail.ECCode,
            EntertainmentCityBetsDetail.PlayerName,
            func.sum(EntertainmentCityBetsDetail.ValidBetAmount),
            func.sum(EntertainmentCityBetsDetail.CusAccount),
            EntertainmentCityBetsDetail.childType,
        ).filter(EntertainmentCityBetsDetail.ReckonTime.between(start_time, end_time)) \
            .group_by(EntertainmentCityBetsDetail.ECCode, EntertainmentCityBetsDetail.PlayerName,
                      EntertainmentCityBetsDetail.childType).all()
        for res in new_result_EC_betAmount:
            # print(res)
            reb = db.session.query(Member.id, Member.parent).filter(Member.username == res[1]).first()
            result = db.session.query(Member.commissionConfig).filter(Member.id == reb[1],
                                                                      Member.isTsetPLay != 1).first()
            result_tyb = db.session.query(YongJinTyb).filter(YongJinTyb.Yid == result[0]).order_by(
                YongJinTyb.pcJine.desc()).all()
            is_yongjin = db.session.query(ConfigYongjin.enable).filter(ConfigYongjin.id == result[0]).first()
            if is_yongjin[0] == 1:
                amount = 0
                fanshui = 0
                for parent_list in list_a:
                    if reb[1] == parent_list[0]:
                        # Pick the qualifying tier; for-else falls back to
                        # the lowest tier when none matches.
                        for res_tyb in result_tyb:
                            if parent_list[1] >= res_tyb.pcJine and parent_list[2] >= res_tyb.yxhuiyuan:
                                tyb = json.loads(res_tyb.tuiyongbi)
                                for k, v in tyb.items():
                                    if k == res[0]:
                                        for key, value in v.items():
                                            if int(key) == res[4]:
                                                if value == None:
                                                    tyb = 0
                                                else:
                                                    tyb = value
                                                amount = round((res[2] - res[3]) * tyb / 100, 2)
                                                fanshui = db.session.query(func.sum(MemberFanshuiPc.amount)).filter(
                                                    MemberFanshuiPc.username == res[1],
                                                    MemberFanshuiPc.fanshuiTime.between(startTime, endTime),
                                                    MemberFanshuiPc.ec_name == k,
                                                    MemberFanshuiPc.childType == int(key)
                                                ).first()
                                                if fanshui[0] == None:
                                                    fanshui = 0
                                                else:
                                                    fanshui = fanshui[0]
                                break
                        else:
                            tyb = json.loads(result_tyb[-1].tuiyongbi)
                            for k, v in tyb.items():
                                if k == res[0]:
                                    for key, value in v.items():
                                        if int(key) == res[4]:
                                            if value == None:
                                                tyb = 0
                                            else:
                                                tyb = value
                                            amount = round((res[2] - res[3]) * tyb / 100, 2)
                                            fanshui = db.session.query(func.sum(MemberFanshuiPc.amount)).filter(
                                                MemberFanshuiPc.username == res[1],
                                                MemberFanshuiPc.fanshuiTime.between(startTime, endTime),
                                                MemberFanshuiPc.ec_name == k,
                                                MemberFanshuiPc.childType == int(key)
                                            ).first()
                                            if fanshui[0] == None:
                                                fanshui = 0
                                            else:
                                                fanshui = fanshui[0]
                actionTime = int(time.time())
                # yongjinTime = (datetime.date.today() + datetime.timedelta(days=-1)).strftime("%Y-%m-%d")
                ec_member_yongjin = MemberYongjinCompute()
                ec_member_yongjin.recordId = recordId
                ec_member_yongjin.uid = reb[0]
                ec_member_yongjin.username = res[1]
                ec_member_yongjin.sunyi = round(res[2] - res[3], 2)  # profit/loss
                ec_member_yongjin.amount = amount  # commission
                ec_member_yongjin.fanshui = fanshui  # rebate
                ec_member_yongjin.betAmount = res[2]
                ec_member_yongjin.actionTime = actionTime
                ec_member_yongjin.startTime = startTime
                ec_member_yongjin.endTime = endTime
                ec_member_yongjin.type = 2
                ec_member_yongjin.ec_name = res[0]
                ec_member_yongjin.childType = res[4]
                ec_member_yongjin.parentId = reb[1]
                try:
                    db.session.add(ec_member_yongjin)
                    db.session.commit()
                except:
                    db.session.rollback()
                    db.session.remove()
        # Agent commission: aggregate this run's member rows per agent.
        res_agent_yongjin = db.session.query(
            MemberYongjinCompute.parentId,
            func.sum(MemberYongjinCompute.sunyi).label("sunyi"),
            func.sum(MemberYongjinCompute.amount).label("amount"),
            func.sum(MemberYongjinCompute.discounts).label("discounts"),
            func.sum(MemberYongjinCompute.fanshui).label("fanshui"),
            func.sum(MemberYongjinCompute.deposits).label("deposits"),
            func.sum(MemberYongjinCompute.withdrawals).label("withdrawals"),
            func.sum(MemberYongjinCompute.betAmount).label("betAmount"),
            func.sum(MemberYongjinCompute.ss_fanshui).label("ss_fanshui")  # i[8]
        ).filter(MemberYongjinCompute.recordId == recordId).group_by(
            MemberYongjinCompute.parentId).all()
        # current_app.logger.info("所有代理的聚合查询的结果集%s" % res_agent_yongjin)
        # try:
        #     res_recordId = db.session.query(MemberAgentDetail.recordId).filter().order_by(
        #         MemberAgentDetail.recordId.desc()).first()
        #
        #     recordId = res_recordId[0] + 1
        # except:
        #     db.session.rollback()
        #     recordId = 1
        data_a = []
        for i in res_agent_yongjin:  # i[0] is the agent id
            if i[0]:
                d = {}
                d['sunyi'] = round(i[1], 2)
                d['betAmount'] = round(i[7], 2)
                # Deposit/withdrawal handling fees, each capped at the
                # configuration's per-run maximum.
                reb = db.session.query(Member.commissionConfig, Member.username, Member.type).filter(
                    Member.id == i[0]).first()
                try:
                    agent_sheding = db.session.query(ConfigYongjin).filter(ConfigYongjin.id == reb[0]).first()
                except:
                    return {'success': False, 'errorMsg': '没有有效数据数据'}
                current_app.logger.info("查询代理%s对应的佣金设定%s" % (i[0], agent_sheding))
                cun = i[5] * agent_sheding.dbcksxf / 100
                if cun < agent_sheding.cksxfsx:
                    agent_cun = cun
                else:
                    agent_cun = agent_sheding.cksxfsx
                qu = i[6] * agent_sheding.dbqksxf / 100
                if qu < agent_sheding.qksxfsx:
                    agent_qu = qu
                else:
                    agent_qu = agent_sheding.qksxfsx
                # Agent commission = member commission - promos - rebates
                # - real-time rebates - handling fees.
                agent_yj = round(i[2] - i[3] - i[4] - i[8] - (agent_cun + agent_qu), 2)
                d['agent_yj'] = agent_yj
                # Count members of this agent that have a record in the range.
                res_member_id = db.session.query(Member.id).filter(Member.parent == i[0],
                                                                   Member.isTsetPLay != 1).all()
                res_member_id_yj = db.session.query(MemberYongjinCompute.uid).filter(
                    MemberYongjinCompute.startTime == startTime, MemberYongjinCompute.endTime == endTime).all()
                member_count = set()
                for res in res_member_id:
                    for res_yj in res_member_id_yj:
                        if res[0] == res_yj[0]:
                            member_count.add(res_yj[0])
                d['member_count'] = len(member_count)
                d['username'] = reb[1]
                d['id'] = i[0]
                d['type'] = reb[2]
                d['recordId'] = recordId
                current_app.logger.info("给定时间%s内代理%s的佣金%s,会员人数%s,损益%s,有效投注金额%s" % (
                    '2019-7-30', d['id'], d['agent_yj'], d['member_count'], d['sunyi'], d['betAmount']))
                data_a.append(d)
                youhui = i[3]
                youhui_bi = 0
                fanshui = i[4]
                fanshui_bi = 0
                ss_fanshui = i[8]
                deposits = i[5]
                withdrawals = i[6]
                agent_tyb = db.session.query(YongJinTyb).filter(YongJinTyb.Yid == reb[0]).order_by(
                    YongJinTyb.pcJine.desc()).all()
                # NOTE(review): both branches break on the FIRST iteration,
                # so only the highest tier is tested; middle tiers are never
                # reached — confirm this is intended.
                for result in agent_tyb:
                    if d['sunyi'] >= result.pcJine and d['member_count'] >= result.yxhuiyuan:
                        youhui_bi = result.youhui
                        fanshui_bi = result.fanshui
                        break
                    else:
                        youhui_bi = agent_tyb[-1].youhui
                        fanshui_bi = agent_tyb[-1].fanshui
                        break
                memberAgent = MemberAgentDetail()
                memberAgent.uid = d['id']
                memberAgent.recordId = recordId
                memberAgent.username = d['username']
                memberAgent.type = d['type']
                memberAgent.yongjin = d['agent_yj']
                memberAgent.memberCount = d['member_count']
                memberAgent.sunyi = d['sunyi']
                memberAgent.betAmount = d['betAmount']
                memberAgent.youhui = youhui
                memberAgent.youhui_bi = youhui_bi
                memberAgent.fanshui = fanshui
                memberAgent.fanshui_bi = fanshui_bi
                memberAgent.ss_fanshui = ss_fanshui
                memberAgent.deposits = deposits
                memberAgent.withdrawals = withdrawals
                memberAgent.startTime = startTime
                memberAgent.endTime = endTime
                try:
                    db.session.add(memberAgent)
                    db.session.commit()
                except Exception as e:
                    db.session.rollback()
                    db.session.remove()
        return make_response(data_a)
class AgentYongjinExport(Resource):
    """Export one agent's member commission details to an .xlsx file."""
    def get(self):
        """Build an Excel workbook for agent ``uid`` in run ``recordId``.

        One row per member: money-movement totals plus per-product
        (city/game-type) bet amount and commission columns. Returns the
        generated filename under the app's static folder.
        """
        parser = RequestParser(trim=True)
        parser.add_argument('recordId', type=int)
        parser.add_argument('uid', type=int)
        args = parser.parse_args(strict=True)
        agentId = int(request.args.get('uid'))
        recordId = int(request.args.get('recordId'))
        res_agent_yj = db.session.query(MemberAgentDetail.startTime, MemberAgentDetail.endTime).filter(
            MemberAgentDetail.uid == agentId, MemberAgentDetail.recordId == recordId).first()
        # Per-member money-movement totals for this agent and run.
        res_member_yj = db.session.query(
            MemberYongjinCompute.username,
            func.sum(MemberYongjinCompute.sunyi).label("sunyi"),
            func.sum(MemberYongjinCompute.discounts).label("discounts"),
            func.sum(MemberYongjinCompute.fanshui).label("fanshui"),
            func.sum(MemberYongjinCompute.ss_fanshui).label("ss_fanshui"),
            func.sum(MemberYongjinCompute.deposits).label("deposits"),
            func.sum(MemberYongjinCompute.withdrawals).label("withdrawals"),
            func.sum(MemberYongjinCompute.betAmount).label("betAmount")).filter(
            MemberYongjinCompute.parentId == agentId,
            MemberYongjinCompute.recordId == recordId
        ).group_by(MemberYongjinCompute.username).all()
        # Per-member, per-product (city + game type) amounts for this run.
        res_member_ec = db.session.query(MemberYongjinCompute.uid, MemberYongjinCompute.username,
                                         func.sum(MemberYongjinCompute.amount).label("amount"),
                                         func.sum(MemberYongjinCompute.betAmount).label("betAmount"),
                                         MemberYongjinCompute.ec_name, MemberYongjinCompute.childType).filter(
            MemberYongjinCompute.parentId == agentId,
            MemberYongjinCompute.recordId == recordId).group_by(
            MemberYongjinCompute.id, MemberYongjinCompute.ec_name, MemberYongjinCompute.childType).all()
        from openpyxl import Workbook
        import os, time
        workbook = Workbook()
        worksheet = workbook.create_sheet("会员资料", 0)
        biaoti = ['会员名', '总损益', '总优惠', '总存款', '总提款', '总返水', '总时时返水', '总有效投注', 'KK彩票有效投注', 'KK彩票损益', 'AG视讯有效投注',
                  'AG视讯损益', 'AG电子有效投注', 'AG电子损益', 'PT电子有效投注', 'PT电子损益', 'PT捕鱼有效投注', 'PT捕鱼损益',
                  'KAIYUAN棋牌有效投注', 'KAIYUAN棋牌损益']
        worksheet.append(biaoti)
        for result in res_member_yj:
            # Columns 0-7: base totals (None coerced to 0).
            resul_list = []
            for res in result:
                if res == None:
                    res = 0
                resul_list.append(res)
            # Columns 8-19: append/overwrite the fixed product columns in
            # pairs (bet amount, commission); the `len(...) == N` check
            # appends a pair when the column slot hasn't been written yet,
            # and the else branch of each product check pads zeros.
            for i in res_member_ec:
                if i.username == result.username:
                    if i.ec_name == 'KK' and i.childType == 1001:
                        if len(resul_list) == 8:
                            resul_list.append(round(i.betAmount, 2))
                            resul_list.append(round(i.amount, 2))
                        else:
                            resul_list[8] = round(i.betAmount, 2)
                            resul_list[9] = round(i.amount, 2)
                    else:
                        if len(resul_list) == 8:
                            resul_list.append(0)
                            resul_list.append(0)
                    if i.ec_name == 'AG' and i.childType == 1002:
                        if len(resul_list) == 10:
                            resul_list.append(round(i.betAmount, 2))
                            resul_list.append(round(i.amount, 2))
                        else:
                            resul_list[10] = round(i.betAmount, 2)
                            resul_list[11] = round(i.amount, 2)
                    else:
                        if len(resul_list) == 10:
                            resul_list.append(0)
                            resul_list.append(0)
                    if i.ec_name == 'AG' and i.childType == 1004:
                        if len(resul_list) == 12:
                            resul_list.append(round(i.betAmount, 2))
                            resul_list.append(round(i.amount, 2))
                        else:
                            resul_list[12] = round(i.betAmount, 2)
                            resul_list[13] = round(i.amount, 2)
                    else:
                        if len(resul_list) == 12:
                            resul_list.append(0)
                            resul_list.append(0)
                    if i.ec_name == 'PT' and i.childType == 1004:
                        if len(resul_list) == 14:
                            resul_list.append(round(i.betAmount, 2))
                            resul_list.append(round(i.amount, 2))
                        else:
                            resul_list[14] = round(i.betAmount, 2)
                            resul_list[15] = round(i.amount, 2)
                    else:
                        if len(resul_list) == 14:
                            resul_list.append(0)
                            resul_list.append(0)
                    if i.ec_name == 'PT' and i.childType == 1007:
                        if len(resul_list) == 16:
                            resul_list.append(round(i.betAmount, 2))
                            resul_list.append(round(i.amount, 2))
                        else:
                            resul_list[16] = round(i.betAmount, 2)
                            resul_list[17] = round(i.amount, 2)
                    else:
                        if len(resul_list) == 16:
                            resul_list.append(0)
                            resul_list.append(0)
                    if i.ec_name == 'KAIYUAN' and i.childType == 1003:
                        if len(resul_list) == 18:
                            resul_list.append(round(i.betAmount, 2))
                            resul_list.append(round(i.amount, 2))
                        else:
                            resul_list[18] = round(i.betAmount, 2)
                            resul_list[19] = round(i.amount, 2)
                    else:
                        if len(resul_list) == 18:
                            resul_list.append(0)
                            resul_list.append(0)
            worksheet.append(resul_list)
        # Save under the Flask static folder with a unique timestamped name.
        filename = 'result-' + str(int(time.time())) + '.xlsx'
        workbook.save(os.path.join(current_app.static_folder, filename))
        # path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
        # path = os.path.join(path, 'static')
        # workbook.save(os.path.join(path, filename))
        return make_response([{
            'success': True,
            'resultFilename': filename,
        }])
class AllAgentYongjinExport(Resource):
    """Export a commission (佣金) settlement record for all agents as an .xlsx file.

    Query parameters:
        recordId (int): id of the settlement record to export.
        hasMemberDetail (int): 1 -> include a per-member detail sheet ("会员资料")
            in addition to the agent commission sheet ("代理佣金");
            0 -> export the agent commission sheet only.

    Returns a JSON payload containing the generated file name; the workbook is
    saved under the Flask static folder.

    Refactor notes (behavior-preserving):
      * the previously duplicated ~120-line agent-sheet builder is now a single
        helper used by both branches;
      * the copy-pasted per-platform column juggling is replaced by a lookup
        table with identical semantics (last matching record per platform wins,
        zeros elsewhere; members with no platform records keep only the 8
        aggregate columns);
      * an unused query on MemberAgentDetail.startTime/endTime was removed.
    """

    # Platform columns appended after the 8 aggregate member columns, in fixed
    # order; each contributes two cells: (有效投注 betAmount, 损益 amount).
    _EC_PLATFORMS = [
        ('KK', 1001),
        ('AG', 1002),
        ('AG', 1004),
        ('PT', 1004),
        ('PT', 1007),
        ('KAIYUAN', 1003),
    ]

    def get(self):
        from openpyxl import Workbook
        import os, time

        parser = RequestParser(trim=True)
        parser.add_argument('recordId', type=int)
        parser.add_argument('hasMemberDetail', type=int)
        # strict parsing rejects unexpected query parameters with a 400
        parser.parse_args(strict=True)
        # NOTE(review): as before, a missing parameter raises TypeError (-> 500);
        # consider required=True on the parser arguments instead.
        recordId = int(request.args.get('recordId'))
        hasMemberDetail = int(request.args.get('hasMemberDetail'))

        if hasMemberDetail == 1:
            workbook = Workbook()
            self._write_member_sheet(workbook.create_sheet("会员资料", 0), recordId)
            self._write_agent_sheet(workbook.create_sheet("代理佣金", 1), recordId)
        elif hasMemberDetail == 0:
            workbook = Workbook()
            self._write_agent_sheet(workbook.create_sheet("代理佣金", 0), recordId)
        else:
            # unchanged behavior: any other value produces an empty response
            return None

        filename = 'result-' + str(int(time.time())) + '.xlsx'
        workbook.save(os.path.join(current_app.static_folder, filename))
        return make_response([{
            'success': True,
            'resultFilename': filename,
        }])

    def _write_member_sheet(self, worksheet, recordId):
        """Fill `worksheet` with one row per member: 8 aggregate columns followed
        by (betAmount, amount) pairs for each platform in _EC_PLATFORMS."""
        # per-member aggregates for the record
        res_member_yj = db.session.query(
            MemberYongjinCompute.username,
            func.sum(MemberYongjinCompute.sunyi).label("sunyi"),
            func.sum(MemberYongjinCompute.discounts).label("discounts"),
            func.sum(MemberYongjinCompute.deposits).label("deposits"),
            func.sum(MemberYongjinCompute.withdrawals).label("withdrawals"),
            func.sum(MemberYongjinCompute.fanshui).label("fanshui"),
            func.sum(MemberYongjinCompute.ss_fanshui).label("ss_fanshui"),
            func.sum(MemberYongjinCompute.betAmount).label("betAmount")).filter(
            MemberYongjinCompute.recordId == recordId).group_by(
            MemberYongjinCompute.username).all()
        # per-member, per-platform records for the record
        res_member_ec = db.session.query(MemberYongjinCompute.uid, MemberYongjinCompute.username,
                                         func.sum(MemberYongjinCompute.amount).label("amount"),
                                         func.sum(MemberYongjinCompute.betAmount).label("betAmount"),
                                         MemberYongjinCompute.ec_name, MemberYongjinCompute.childType).filter(
            MemberYongjinCompute.recordId == recordId).group_by(
            MemberYongjinCompute.id, MemberYongjinCompute.ec_name, MemberYongjinCompute.childType).all()

        worksheet.append(['会员名', '总损益', '总优惠', '总存款', '总提款', '总返水', '总时时返水', '总有效投注',
                          'KK彩票有效投注', 'KK彩票损益', 'AG视讯有效投注',
                          'AG视讯损益', 'AG电子有效投注', 'AG电子损益', 'PT电子有效投注', 'PT电子损益', 'PT捕鱼有效投注', 'PT捕鱼损益',
                          'KAIYUAN棋牌有效投注', 'KAIYUAN棋牌损益'])
        for result in res_member_yj:
            # NULL aggregates are exported as 0
            row = [0 if res is None else res for res in result]
            ec_rows = [i for i in res_member_ec if i.username == result.username]
            if ec_rows:
                # default every platform to (0, 0); the last matching record
                # for a platform wins, mirroring the previous overwrite logic
                slots = {key: (0, 0) for key in self._EC_PLATFORMS}
                for i in ec_rows:
                    key = (i.ec_name, i.childType)
                    if key in slots:
                        slots[key] = (round(i.betAmount, 2), round(i.amount, 2))
                for key in self._EC_PLATFORMS:
                    row.extend(slots[key])
            # members with no platform records keep only the 8 aggregate columns
            worksheet.append(row)

    def _write_agent_sheet(self, worksheet, recordId):
        """Fill `worksheet` with the agent commission table: a two-row header
        with merged cells, then one row per agent."""
        from openpyxl.styles import Alignment

        # per-agent platform commission (平台佣金), summed over the record
        res_pt_yingjin = db.session.query(
            MemberYongjinCompute.parentId,
            func.sum(MemberYongjinCompute.amount).label("amount")).filter(
            MemberYongjinCompute.recordId == recordId).group_by(
            MemberYongjinCompute.parentId).all()
        # 代理
        result_agent_yj = db.session.query(MemberAgentDetail.uid,
                                           MemberAgentDetail.username,
                                           MemberAgentDetail.type,
                                           MemberAgentDetail.yongjin,
                                           MemberAgentDetail.memberCount,
                                           MemberAgentDetail.sunyi,
                                           MemberAgentDetail.youhui,
                                           MemberAgentDetail.fanshui,
                                           MemberAgentDetail.ss_fanshui,
                                           MemberAgentDetail.deposits,
                                           MemberAgentDetail.withdrawals,
                                           MemberAgentDetail.betAmount
                                           ).filter(MemberAgentDetail.recordId == recordId).all()

        # header layout: single-row columns span rows 1-2; the three grouped
        # columns (优惠 / 返水 / 时时返水) span three sub-columns each
        for cell_range in ('A1:A2', 'B1:B2', 'C1:C2', 'D1:D2', 'E1:E2', 'F1:F2',
                           'G1:I1', 'J1:L1', 'M1:O1', 'P1:P2', 'Q1:Q2', 'R1:R2',
                           'S1:S2', 'T1:T2'):
            worksheet.merge_cells(cell_range)
        headers = {
            'A1': "层级", 'B1': "账号", 'C1': "佣金", 'D1': "有效会员", 'E1': "总损益",
            'F1': "平台佣金", 'G1': "优惠", 'J1': "返水", 'M1': "时时返水",
            'G2': "总优惠", 'H2': "优惠占比", 'I2': "优惠负担额",
            'J2': "总返水", 'K2': "返水占比", 'L2': "返水负担额",
            'M2': "总时时返水", 'N2': "时时返水占比", 'O2': "时时返水负担额",
            'P1': "总存款", 'Q1': "总存款手续费", 'R1': "总取款", 'S1': "总取款手续费",
            'T1': "总有效投注",
        }
        for cell, text in headers.items():
            worksheet[cell] = text
        centered = Alignment(horizontal="center", vertical="center")
        for cell in ('A1', 'B1', 'C1', 'D1', 'E1', 'F1', 'G1', 'J1', 'M1',
                     'P1', 'Q1', 'R1', 'S1', 'T1'):
            worksheet[cell].alignment = centered

        for result in result_agent_yj:
            result_bi = db.session.query(MemberAgentDetail.youhui_bi, MemberAgentDetail.fanshui_bi).filter(
                MemberAgentDetail.recordId == recordId, MemberAgentDetail.uid == result.uid).first()
            youhui_bi = '%d%%' % result_bi[0]
            fanshui_bi = '%d%%' % result_bi[1]
            ss_fanshui_bi = '%d%%' % 100
            reb = db.session.query(Member.commissionConfig).filter(Member.id == result[0]).first()
            agent_sheding = db.session.query(ConfigYongjin).filter(ConfigYongjin.id == reb[0]).first()
            # deposit / withdrawal handling fees from the agent's configured rates
            cksxf = result[9] * agent_sheding.dbcksxf / 100
            qksxf = result[10] * agent_sheding.dbqksxf / 100
            pt_yongjin = None  # 平台佣金; stays None when the agent has no entry
            for re in res_pt_yingjin:
                r_name = db.session.query(Member.username).filter(Member.id == re[0]).first()
                if result.username == r_name[0]:
                    pt_yongjin = round(re[1], 2)
            worksheet.append([
                result[2],                            # 层级
                result[1],                            # 账号
                result[3],                            # 佣金
                result[4],                            # 有效会员
                result[5],                            # 总损益
                pt_yongjin,                           # 平台佣金
                result[6], youhui_bi, result[6],      # 优惠: 总额 / 占比 / 负担额
                result[7], fanshui_bi, result[7],     # 返水: 总额 / 占比 / 负担额
                result[8], ss_fanshui_bi, result[8],  # 时时返水: 总额 / 占比 / 负担额
                result[9], cksxf,                     # 总存款 / 手续费
                result[10], qksxf,                    # 总取款 / 手续费
                result[11],                           # 总有效投注
            ])
| [
"15294786074@163.com"
] | 15294786074@163.com |
4b5304e679901413066a5018e1a246a624a23db2 | 1d2e26ea7ed6d49c05a45e8b55243e52011b74e4 | /tests/test_main.py | 91c29b2679ea43df1ebebd177965776cc571f6ec | [] | no_license | mikeizbicki/html_validator | 723837acd090f29675bb4660d92710a7c9648d73 | d786058e2a8a55341e733f55be3770870f7d4124 | refs/heads/master | 2023-02-18T05:40:00.774317 | 2023-02-03T20:32:23 | 2023-02-03T20:32:23 | 238,632,615 | 1 | 143 | null | 2023-02-08T08:01:43 | 2020-02-06T07:36:35 | Python | UTF-8 | Python | false | false | 3,574 | py | import HTML_Validator
import pytest
def test__extract_tags_1():
    """An empty document yields no tags."""
    assert HTML_Validator._extract_tags('') == []


def test__extract_tags_2():
    """Plain text without markup yields no tags."""
    assert HTML_Validator._extract_tags('python in fun') == []


def test__extract_tags_3():
    """Adjacent open/close tags are both extracted, in order."""
    expected = ['<strong>', '</strong>']
    assert HTML_Validator._extract_tags('<strong></strong>') == expected


def test__extract_tags_4():
    """Surrounding text is ignored; only tags are returned."""
    expected = ['<strong>', '</strong>']
    assert HTML_Validator._extract_tags('python in <strong>fun</strong>') == expected


def test__extract_tags_5():
    """Document order is preserved even when tags are unbalanced."""
    expected = ['<a>', '<b>', '<c>', '</a>', '</b>', '<f>']
    assert HTML_Validator._extract_tags('<a><b><c></a></b><f>') == expected
"""
def test__extract_tags_6():
with pytest.raises(ValueError, match='found < without matching >'):
HTML_Validator._extract_tags('<')
def test__extract_tags_7():
with pytest.raises(ValueError, match='found < without matching >'):
HTML_Validator._extract_tags('this is a <strong test')
def test__extract_tags_8():
with pytest.raises(ValueError, match='found < without matching >'):
HTML_Validator._extract_tags('this is a <strong< test')
"""
def test__extract_tags_9():
    """Stress test: 20k numbered tags round-trip through extraction."""
    count = 10000
    tags = ['<%d>' % i for i in range(count)] + ['</%d>' % i for i in range(count)]
    assert HTML_Validator._extract_tags(' '.join(tags)) == tags
def test_validate_html_1():
    """The empty document is valid."""
    assert HTML_Validator.validate_html('')


def test_validate_html_2():
    """A single matched pair is valid."""
    assert HTML_Validator.validate_html('<a></a>')


def test_validate_html_3():
    """An unclosed tag is invalid."""
    assert not HTML_Validator.validate_html('<a>')


def test_validate_html_4():
    """A stray closing tag is invalid."""
    assert not HTML_Validator.validate_html('</a>')


def test_validate_html_5():
    """Sibling matched pairs are valid."""
    assert HTML_Validator.validate_html('<strong></strong><b></b>')
def test_validate_html_6():
    """Properly nested distinct tags are valid."""
    assert HTML_Validator.validate_html('<strong><b></b></strong>')


def test_validate_html_6b():
    """Nested repetition of the same tag is valid.

    Renamed from a duplicate ``test_validate_html_6``: the second definition
    shadowed the first at import time, so pytest only ever collected and ran
    one of the two tests.
    """
    assert HTML_Validator.validate_html('<strong><strong></strong></strong>')
def test_validate_html_7():
    """Interleaved (crossing) tags are invalid."""
    assert not HTML_Validator.validate_html('<strong><b></strong></b>')


def test_validate_html_8():
    """Plain text mixed with a matched pair is valid."""
    assert HTML_Validator.validate_html('this is a test <em>hello!</em>')
def test_validate_html_9():
    """A complete, well-formed HTML document validates."""
    document = '''
    <html>
        <head>
            <title>This is an awesome webpage!</title>
        </head>
        <body>
            <p>Programming is the <strong>best</strong>!</p>
        </body>
    </html>
    '''
    assert HTML_Validator.validate_html(document)


def test_validate_html_10():
    """The same document with an unclosed <p> does not validate."""
    document = '''
    <html>
        <head>
            <title>This is an awesome webpage!</title>
        </head>
        <body>
            <p>Programming is the <strong>best</strong>!
        </body>
    </html>
    '''
    assert not HTML_Validator.validate_html(document)
def test_validate_html_11():
    """A lone '<' is invalid."""
    assert not HTML_Validator.validate_html('<')


def test_validate_html_12():
    """An unterminated tag ('<' with no '>') is invalid."""
    assert not HTML_Validator.validate_html('this is a <strong test')


def test_validate_html_13():
    """A second '<' before any '>' is invalid."""
    assert not HTML_Validator.validate_html('this is a <strong< test')
def test_validate_html_14():
    """Stress test: 10k properly nested pairs validate; truncations do not."""
    count = 10000
    opens = ['<%d>' % i for i in range(count)]
    closes = ['</%d>' % i for i in reversed(range(count))]
    nested = opens + closes
    assert HTML_Validator.validate_html(' '.join(nested))
    assert not HTML_Validator.validate_html(' '.join(opens))
    assert not HTML_Validator.validate_html(' '.join(closes))
    assert not HTML_Validator.validate_html(' '.join(nested[:-1]))


def test_validate_html_15():
    """Stress test: 10k consecutive sibling pairs validate."""
    count = 10000
    pairs = ['<%d></%d>' % (i, i) for i in range(count)]
    assert HTML_Validator.validate_html(' '.join(pairs))
| [
"mike@izbicki.me"
] | mike@izbicki.me |
c03dc16c54586a55a1a21aae25066d0c7d634f84 | 8a699595e7f156b1ade42f6042900b3331831fbf | /src/transformers/models/splinter/modeling_splinter.py | 86e1c846c82fb291b4e23edf390f3a1ee3dbeb23 | [
"Apache-2.0"
] | permissive | stas00/transformers | ab654371a387c5883fc882dd0286177875d6d3b4 | 7c5d79912a21880ce13d77881940458e90d98917 | refs/heads/master | 2023-02-16T00:22:41.298155 | 2022-04-08T20:55:42 | 2022-04-08T20:55:42 | 278,214,696 | 6 | 0 | Apache-2.0 | 2022-01-28T18:39:00 | 2020-07-08T23:24:49 | Python | UTF-8 | Python | false | false | 44,759 | py | # coding=utf-8
# Copyright 2021 Tel AViv University, AllenAI and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch Splinter model."""
import math
from typing import List, Optional, Tuple, Union
import torch
import torch.utils.checkpoint
from torch import nn
from torch.nn import CrossEntropyLoss
from ...activations import ACT2FN
from ...modeling_outputs import BaseModelOutputWithPastAndCrossAttentions, QuestionAnsweringModelOutput
from ...modeling_utils import (
PreTrainedModel,
apply_chunking_to_forward,
find_pruneable_heads_and_indices,
prune_linear_layer,
)
from ...utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging
from .configuration_splinter import SplinterConfig
logger = logging.get_logger(__name__)

# Default checkpoint / config / tokenizer names interpolated into the
# auto-generated docstring examples (add_code_sample_docstrings).
_CHECKPOINT_FOR_DOC = "tau/splinter-base"
_CONFIG_FOR_DOC = "SplinterConfig"
_TOKENIZER_FOR_DOC = "SplinterTokenizer"

# Hub ids of the published pretrained Splinter checkpoints.
SPLINTER_PRETRAINED_MODEL_ARCHIVE_LIST = [
    "tau/splinter-base",
    "tau/splinter-base-qass",
    "tau/splinter-large",
    "tau/splinter-large-qass",
    # See all Splinter models at https://huggingface.co/models?filter=splinter
]
class SplinterEmbeddings(nn.Module):
"""Construct the embeddings from word, position and token_type embeddings."""
def __init__(self, config):
super().__init__()
self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id)
self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size)
self.token_type_embeddings = nn.Embedding(config.type_vocab_size, config.hidden_size)
# self.LayerNorm is not snake-cased to stick with TensorFlow model variable name and be able to load
# any TensorFlow checkpoint file
self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
# position_ids (1, len position emb) is contiguous in memory and exported when serialized
self.register_buffer("position_ids", torch.arange(config.max_position_embeddings).expand((1, -1)))
self.position_embedding_type = getattr(config, "position_embedding_type", "absolute")
def forward(
self,
input_ids: Optional[torch.LongTensor] = None,
token_type_ids: Optional[torch.LongTensor] = None,
position_ids: Optional[torch.LongTensor] = None,
inputs_embeds: Optional[torch.FloatTensor] = None,
past_key_values_length: Optional[int] = 0,
) -> Tuple:
if input_ids is not None:
input_shape = input_ids.size()
else:
input_shape = inputs_embeds.size()[:-1]
seq_length = input_shape[1]
if position_ids is None:
position_ids = self.position_ids[:, past_key_values_length : seq_length + past_key_values_length]
if token_type_ids is None:
token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=self.position_ids.device)
if inputs_embeds is None:
inputs_embeds = self.word_embeddings(input_ids)
token_type_embeddings = self.token_type_embeddings(token_type_ids)
embeddings = inputs_embeds + token_type_embeddings
if self.position_embedding_type == "absolute":
position_embeddings = self.position_embeddings(position_ids)
embeddings += position_embeddings
embeddings = self.LayerNorm(embeddings)
embeddings = self.dropout(embeddings)
return embeddings
# Copied from transformers.models.bert.modeling_bert.BertSelfAttention with Bert->Splinter
class SplinterSelfAttention(nn.Module):
    """Multi-head (self- or cross-) attention with optional key/value caching.

    Supports absolute position embeddings (default, added in SplinterEmbeddings)
    as well as "relative_key" / "relative_key_query" relative position scores.
    When `config.is_decoder` is set, the computed key/value tensors are returned
    so callers can reuse them on subsequent steps.
    """

    def __init__(self, config, position_embedding_type=None):
        super().__init__()
        # hidden_size must split evenly across heads unless an explicit
        # embedding_size is configured
        if config.hidden_size % config.num_attention_heads != 0 and not hasattr(config, "embedding_size"):
            raise ValueError(
                f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention "
                f"heads ({config.num_attention_heads})"
            )

        self.num_attention_heads = config.num_attention_heads
        self.attention_head_size = int(config.hidden_size / config.num_attention_heads)
        self.all_head_size = self.num_attention_heads * self.attention_head_size

        self.query = nn.Linear(config.hidden_size, self.all_head_size)
        self.key = nn.Linear(config.hidden_size, self.all_head_size)
        self.value = nn.Linear(config.hidden_size, self.all_head_size)

        self.dropout = nn.Dropout(config.attention_probs_dropout_prob)
        self.position_embedding_type = position_embedding_type or getattr(
            config, "position_embedding_type", "absolute"
        )
        if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query":
            self.max_position_embeddings = config.max_position_embeddings
            # one embedding per possible (signed) token distance
            self.distance_embedding = nn.Embedding(2 * config.max_position_embeddings - 1, self.attention_head_size)

        self.is_decoder = config.is_decoder

    def transpose_for_scores(self, x):
        """Reshape (batch, seq, all_head_size) -> (batch, heads, seq, head_size)."""
        new_x_shape = x.size()[:-1] + (self.num_attention_heads, self.attention_head_size)
        x = x.view(new_x_shape)
        return x.permute(0, 2, 1, 3)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
        past_key_value: Optional[Tuple[Tuple[torch.FloatTensor]]] = None,
        output_attentions: Optional[bool] = False,
    ) -> Tuple[torch.Tensor]:
        """Return (context_layer[, attention_probs][, present_key_value])
        depending on `output_attentions` and `self.is_decoder`."""
        mixed_query_layer = self.query(hidden_states)

        # If this is instantiated as a cross-attention module, the keys
        # and values come from an encoder; the attention mask needs to be
        # such that the encoder's padding tokens are not attended to.
        is_cross_attention = encoder_hidden_states is not None

        if is_cross_attention and past_key_value is not None:
            # reuse k,v, cross_attentions
            key_layer = past_key_value[0]
            value_layer = past_key_value[1]
            attention_mask = encoder_attention_mask
        elif is_cross_attention:
            key_layer = self.transpose_for_scores(self.key(encoder_hidden_states))
            value_layer = self.transpose_for_scores(self.value(encoder_hidden_states))
            attention_mask = encoder_attention_mask
        elif past_key_value is not None:
            # self-attention with cache: concatenate past keys/values along the
            # sequence dimension
            key_layer = self.transpose_for_scores(self.key(hidden_states))
            value_layer = self.transpose_for_scores(self.value(hidden_states))
            key_layer = torch.cat([past_key_value[0], key_layer], dim=2)
            value_layer = torch.cat([past_key_value[1], value_layer], dim=2)
        else:
            key_layer = self.transpose_for_scores(self.key(hidden_states))
            value_layer = self.transpose_for_scores(self.value(hidden_states))

        query_layer = self.transpose_for_scores(mixed_query_layer)

        if self.is_decoder:
            # if cross_attention save Tuple(torch.Tensor, torch.Tensor) of all cross attention key/value_states.
            # Further calls to cross_attention layer can then reuse all cross-attention
            # key/value_states (first "if" case)
            # if uni-directional self-attention (decoder) save Tuple(torch.Tensor, torch.Tensor) of
            # all previous decoder key/value_states. Further calls to uni-directional self-attention
            # can concat previous decoder key/value_states to current projected key/value_states (third "elif" case)
            # if encoder bi-directional self-attention `past_key_value` is always `None`
            past_key_value = (key_layer, value_layer)

        # Take the dot product between "query" and "key" to get the raw attention scores.
        attention_scores = torch.matmul(query_layer, key_layer.transpose(-1, -2))

        if self.position_embedding_type == "relative_key" or self.position_embedding_type == "relative_key_query":
            # add learned relative-distance scores (Shaw et al. style)
            seq_length = hidden_states.size()[1]
            position_ids_l = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(-1, 1)
            position_ids_r = torch.arange(seq_length, dtype=torch.long, device=hidden_states.device).view(1, -1)
            distance = position_ids_l - position_ids_r
            positional_embedding = self.distance_embedding(distance + self.max_position_embeddings - 1)
            positional_embedding = positional_embedding.to(dtype=query_layer.dtype)  # fp16 compatibility

            if self.position_embedding_type == "relative_key":
                relative_position_scores = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
                attention_scores = attention_scores + relative_position_scores
            elif self.position_embedding_type == "relative_key_query":
                relative_position_scores_query = torch.einsum("bhld,lrd->bhlr", query_layer, positional_embedding)
                relative_position_scores_key = torch.einsum("bhrd,lrd->bhlr", key_layer, positional_embedding)
                attention_scores = attention_scores + relative_position_scores_query + relative_position_scores_key

        # scale by sqrt(head size), as in the Transformer paper
        attention_scores = attention_scores / math.sqrt(self.attention_head_size)
        if attention_mask is not None:
            # Apply the attention mask is (precomputed for all layers in SplinterModel forward() function)
            attention_scores = attention_scores + attention_mask

        # Normalize the attention scores to probabilities.
        attention_probs = nn.functional.softmax(attention_scores, dim=-1)

        # This is actually dropping out entire tokens to attend to, which might
        # seem a bit unusual, but is taken from the original Transformer paper.
        attention_probs = self.dropout(attention_probs)

        # Mask heads if we want to
        if head_mask is not None:
            attention_probs = attention_probs * head_mask

        context_layer = torch.matmul(attention_probs, value_layer)

        # merge heads back: (batch, heads, seq, head_size) -> (batch, seq, all_head_size)
        context_layer = context_layer.permute(0, 2, 1, 3).contiguous()
        new_context_layer_shape = context_layer.size()[:-2] + (self.all_head_size,)
        context_layer = context_layer.view(new_context_layer_shape)

        outputs = (context_layer, attention_probs) if output_attentions else (context_layer,)

        if self.is_decoder:
            outputs = outputs + (past_key_value,)
        return outputs
# Copied from transformers.models.bert.modeling_bert.BertSelfOutput with Bert->Splinter
class SplinterSelfOutput(nn.Module):
def __init__(self, config):
super().__init__()
self.dense = nn.Linear(config.hidden_size, config.hidden_size)
self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
hidden_states = self.dense(hidden_states)
hidden_states = self.dropout(hidden_states)
hidden_states = self.LayerNorm(hidden_states + input_tensor)
return hidden_states
# Copied from transformers.models.bert.modeling_bert.BertAttention with Bert->Splinter
class SplinterAttention(nn.Module):
    """Self-attention plus its residual output projection, with head pruning."""

    def __init__(self, config, position_embedding_type=None):
        super().__init__()
        self.self = SplinterSelfAttention(config, position_embedding_type=position_embedding_type)
        self.output = SplinterSelfOutput(config)
        # head indices already removed by prune_heads(); used to re-map
        # subsequent pruning requests onto the remaining heads
        self.pruned_heads = set()

    def prune_heads(self, heads):
        """Remove the given attention heads from this layer in place."""
        if len(heads) == 0:
            return
        heads, index = find_pruneable_heads_and_indices(
            heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads
        )

        # Prune linear layers
        self.self.query = prune_linear_layer(self.self.query, index)
        self.self.key = prune_linear_layer(self.self.key, index)
        self.self.value = prune_linear_layer(self.self.value, index)
        self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)

        # Update hyper params and store pruned heads
        self.self.num_attention_heads = self.self.num_attention_heads - len(heads)
        self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads
        self.pruned_heads = self.pruned_heads.union(heads)

    def forward(
        self,
        hidden_states: torch.Tensor,
        attention_mask: Optional[torch.FloatTensor] = None,
        head_mask: Optional[torch.FloatTensor] = None,
        encoder_hidden_states: Optional[torch.FloatTensor] = None,
        encoder_attention_mask: Optional[torch.FloatTensor] = None,
        past_key_value: Optional[Tuple[Tuple[torch.FloatTensor]]] = None,
        output_attentions: Optional[bool] = False,
    ) -> Tuple[torch.Tensor]:
        """Run self-attention, then the residual output block; pass through any
        extra outputs (attention probs, cached key/values) unchanged."""
        self_outputs = self.self(
            hidden_states,
            attention_mask,
            head_mask,
            encoder_hidden_states,
            encoder_attention_mask,
            past_key_value,
            output_attentions,
        )
        attention_output = self.output(self_outputs[0], hidden_states)
        outputs = (attention_output,) + self_outputs[1:]  # add attentions if we output them
        return outputs
# Position-wise feed-forward expansion; functionally identical to
# transformers.models.bert.modeling_bert.BertIntermediate (Bert->Splinter).
class SplinterIntermediate(nn.Module):
    """Expand hidden_size -> intermediate_size and apply the activation."""

    def __init__(self, config):
        super().__init__()
        self.dense = nn.Linear(config.hidden_size, config.intermediate_size)
        # config.hidden_act may be a key into ACT2FN or a callable
        self.intermediate_act_fn = (
            ACT2FN[config.hidden_act] if isinstance(config.hidden_act, str) else config.hidden_act
        )

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        return self.intermediate_act_fn(self.dense(hidden_states))
# Copied from transformers.models.bert.modeling_bert.BertOutput with Bert->Splinter
class SplinterOutput(nn.Module):
def __init__(self, config):
super().__init__()
self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
def forward(self, hidden_states: torch.Tensor, input_tensor: torch.Tensor) -> torch.Tensor:
hidden_states = self.dense(hidden_states)
hidden_states = self.dropout(hidden_states)
hidden_states = self.LayerNorm(hidden_states + input_tensor)
return hidden_states
# Copied from transformers.models.bert.modeling_bert.BertLayer with Bert->Splinter
class SplinterLayer(nn.Module):
def __init__(self, config):
super().__init__()
self.chunk_size_feed_forward = config.chunk_size_feed_forward
self.seq_len_dim = 1
self.attention = SplinterAttention(config)
self.is_decoder = config.is_decoder
self.add_cross_attention = config.add_cross_attention
if self.add_cross_attention:
if not self.is_decoder:
raise ValueError(f"{self} should be used as a decoder model if cross attention is added")
self.crossattention = SplinterAttention(config, position_embedding_type="absolute")
self.intermediate = SplinterIntermediate(config)
self.output = SplinterOutput(config)
def forward(
self,
hidden_states: torch.Tensor,
attention_mask: Optional[torch.FloatTensor] = None,
head_mask: Optional[torch.FloatTensor] = None,
encoder_hidden_states: Optional[torch.FloatTensor] = None,
encoder_attention_mask: Optional[torch.FloatTensor] = None,
past_key_value: Optional[Tuple[Tuple[torch.FloatTensor]]] = None,
output_attentions: Optional[bool] = False,
) -> Tuple[torch.Tensor]:
# decoder uni-directional self-attention cached key/values tuple is at positions 1,2
self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None
self_attention_outputs = self.attention(
hidden_states,
attention_mask,
head_mask,
output_attentions=output_attentions,
past_key_value=self_attn_past_key_value,
)
attention_output = self_attention_outputs[0]
# if decoder, the last output is tuple of self-attn cache
if self.is_decoder:
outputs = self_attention_outputs[1:-1]
present_key_value = self_attention_outputs[-1]
else:
outputs = self_attention_outputs[1:] # add self attentions if we output attention weights
cross_attn_present_key_value = None
if self.is_decoder and encoder_hidden_states is not None:
if not hasattr(self, "crossattention"):
raise ValueError(
f"If `encoder_hidden_states` are passed, {self} has to be instantiated with cross-attention layers by setting `config.add_cross_attention=True`"
)
# cross_attn cached key/values tuple is at positions 3,4 of past_key_value tuple
cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None
cross_attention_outputs = self.crossattention(
attention_output,
attention_mask,
head_mask,
encoder_hidden_states,
encoder_attention_mask,
cross_attn_past_key_value,
output_attentions,
)
attention_output = cross_attention_outputs[0]
outputs = outputs + cross_attention_outputs[1:-1] # add cross attentions if we output attention weights
# add cross-attn cache to positions 3,4 of present_key_value tuple
cross_attn_present_key_value = cross_attention_outputs[-1]
present_key_value = present_key_value + cross_attn_present_key_value
layer_output = apply_chunking_to_forward(
self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attention_output
)
outputs = (layer_output,) + outputs
# if decoder, return the attn key/values as the last output
if self.is_decoder:
outputs = outputs + (present_key_value,)
return outputs
def feed_forward_chunk(self, attention_output):
intermediate_output = self.intermediate(attention_output)
layer_output = self.output(intermediate_output, attention_output)
return layer_output
# Copied from transformers.models.bert.modeling_bert.BertEncoder with Bert->Splinter
class SplinterEncoder(nn.Module):
def __init__(self, config):
super().__init__()
self.config = config
self.layer = nn.ModuleList([SplinterLayer(config) for _ in range(config.num_hidden_layers)])
self.gradient_checkpointing = False
def forward(
self,
hidden_states: torch.Tensor,
attention_mask: Optional[torch.FloatTensor] = None,
head_mask: Optional[torch.FloatTensor] = None,
encoder_hidden_states: Optional[torch.FloatTensor] = None,
encoder_attention_mask: Optional[torch.FloatTensor] = None,
past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None,
use_cache: Optional[bool] = None,
output_attentions: Optional[bool] = False,
output_hidden_states: Optional[bool] = False,
return_dict: Optional[bool] = True,
) -> Union[Tuple[torch.Tensor], BaseModelOutputWithPastAndCrossAttentions]:
all_hidden_states = () if output_hidden_states else None
all_self_attentions = () if output_attentions else None
all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None
next_decoder_cache = () if use_cache else None
for i, layer_module in enumerate(self.layer):
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states,)
layer_head_mask = head_mask[i] if head_mask is not None else None
past_key_value = past_key_values[i] if past_key_values is not None else None
if self.gradient_checkpointing and self.training:
if use_cache:
logger.warning(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache = False
def create_custom_forward(module):
def custom_forward(*inputs):
return module(*inputs, past_key_value, output_attentions)
return custom_forward
layer_outputs = torch.utils.checkpoint.checkpoint(
create_custom_forward(layer_module),
hidden_states,
attention_mask,
layer_head_mask,
encoder_hidden_states,
encoder_attention_mask,
)
else:
layer_outputs = layer_module(
hidden_states,
attention_mask,
layer_head_mask,
encoder_hidden_states,
encoder_attention_mask,
past_key_value,
output_attentions,
)
hidden_states = layer_outputs[0]
if use_cache:
next_decoder_cache += (layer_outputs[-1],)
if output_attentions:
all_self_attentions = all_self_attentions + (layer_outputs[1],)
if self.config.add_cross_attention:
all_cross_attentions = all_cross_attentions + (layer_outputs[2],)
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states,)
if not return_dict:
return tuple(
v
for v in [
hidden_states,
next_decoder_cache,
all_hidden_states,
all_self_attentions,
all_cross_attentions,
]
if v is not None
)
return BaseModelOutputWithPastAndCrossAttentions(
last_hidden_state=hidden_states,
past_key_values=next_decoder_cache,
hidden_states=all_hidden_states,
attentions=all_self_attentions,
cross_attentions=all_cross_attentions,
)
class SplinterPreTrainedModel(PreTrainedModel):
"""
An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
models.
"""
config_class = SplinterConfig
base_model_prefix = "splinter"
supports_gradient_checkpointing = True
_keys_to_ignore_on_load_missing = [r"position_ids"]
# Copied from transformers.models.bert.modeling_bert.BertPreTrainedModel._init_weights
def _init_weights(self, module):
"""Initialize the weights"""
if isinstance(module, nn.Linear):
# Slightly different from the TF version which uses truncated_normal for initialization
# cf https://github.com/pytorch/pytorch/pull/5617
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if module.bias is not None:
module.bias.data.zero_()
elif isinstance(module, nn.Embedding):
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if module.padding_idx is not None:
module.weight.data[module.padding_idx].zero_()
elif isinstance(module, nn.LayerNorm):
module.bias.data.zero_()
module.weight.data.fill_(1.0)
def _set_gradient_checkpointing(self, module, value=False):
if isinstance(module, SplinterEncoder):
module.gradient_checkpointing = value
SPLINTER_START_DOCSTRING = r"""
This model is a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) sub-class. Use
it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage and
behavior.
Parameters:
config ([`SplinterConfig`]): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights.
"""
SPLINTER_INPUTS_DOCSTRING = r"""
Args:
input_ids (`torch.LongTensor` of shape `({0})`):
Indices of input sequence tokens in the vocabulary.
Indices can be obtained using [`SplinterTokenizer`]. See [`PreTrainedTokenizer.encode`] and
[`PreTrainedTokenizer.__call__`] for details.
[What are input IDs?](../glossary#input-ids)
attention_mask (`torch.FloatTensor` of shape `{0}`, *optional*):
Mask to avoid performing attention on padding token indices. Mask values selected in `[0, 1]`:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
[What are attention masks?](../glossary#attention-mask)
token_type_ids (`torch.LongTensor` of shape `{0}`, *optional*):
Segment token indices to indicate first and second portions of the inputs. Indices are selected in `[0,
1]`:
- 0 corresponds to a *sentence A* token,
- 1 corresponds to a *sentence B* token.
[What are token type IDs?](../glossary#token-type-ids)
position_ids (`torch.LongTensor` of shape `{0}`, *optional*):
Indices of positions of each input sequence tokens in the position embeddings. Selected in the range `[0,
config.max_position_embeddings - 1]`.
[What are position IDs?](../glossary#position-ids)
head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):
Mask to nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
inputs_embeds (`torch.FloatTensor` of shape `({0}, hidden_size)`, *optional*):
Optionally, instead of passing `input_ids` you can choose to directly pass an embedded representation. This
is useful if you want more control over how to convert *input_ids* indices into associated vectors than the
model's internal embedding lookup matrix.
output_attentions (`bool`, *optional*):
Whether or not to return the attentions tensors of all attention layers. See `attentions` under returned
tensors for more detail.
output_hidden_states (`bool`, *optional*):
Whether or not to return the hidden states of all layers. See `hidden_states` under returned tensors for
more detail.
return_dict (`bool`, *optional*):
Whether or not to return a [`~utils.ModelOutput`] instead of a plain tuple.
"""
@add_start_docstrings(
"The bare Splinter Model transformer outputting raw hidden-states without any specific head on top.",
SPLINTER_START_DOCSTRING,
)
class SplinterModel(SplinterPreTrainedModel):
"""
The model is an encoder (with only self-attention) following the architecture described in [Attention is all you
need](https://arxiv.org/abs/1706.03762) by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit, Llion Jones,
Aidan N. Gomez, Lukasz Kaiser and Illia Polosukhin.
"""
def __init__(self, config):
super().__init__(config)
self.config = config
self.embeddings = SplinterEmbeddings(config)
self.encoder = SplinterEncoder(config)
# Initialize weights and apply final processing
self.post_init()
def get_input_embeddings(self):
return self.embeddings.word_embeddings
def set_input_embeddings(self, value):
self.embeddings.word_embeddings = value
def _prune_heads(self, heads_to_prune):
"""
Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
class PreTrainedModel
"""
for layer, heads in heads_to_prune.items():
self.encoder.layer[layer].attention.prune_heads(heads)
@add_start_docstrings_to_model_forward(SPLINTER_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
@add_code_sample_docstrings(
processor_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=BaseModelOutputWithPastAndCrossAttentions,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids: Optional[torch.Tensor] = None,
attention_mask: Optional[torch.Tensor] = None,
token_type_ids: Optional[torch.Tensor] = None,
position_ids: Optional[torch.Tensor] = None,
head_mask: Optional[torch.Tensor] = None,
inputs_embeds: Optional[torch.Tensor] = None,
encoder_hidden_states: Optional[torch.Tensor] = None,
encoder_attention_mask: Optional[torch.Tensor] = None,
past_key_values: Optional[List[torch.FloatTensor]] = None,
use_cache: Optional[bool] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
) -> Union[Tuple, BaseModelOutputWithPastAndCrossAttentions]:
r"""
encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):
Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if
the model is configured as a decoder.
encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*):
Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
the cross-attention if the model is configured as a decoder. Mask values selected in `[0, 1]`:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
past_key_values (`tuple(tuple(torch.FloatTensor))` of length `config.n_layers` with each tuple having 4 tensors of shape `(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.
If `past_key_values` are used, the user can optionally input only the last `decoder_input_ids` (those that
don't have their past key value states given to this model) of shape `(batch_size, 1)` instead of all
`decoder_input_ids` of shape `(batch_size, sequence_length)`.
use_cache (`bool`, *optional*):
If set to `True`, `past_key_values` key value states are returned and can be used to speed up decoding (see
`past_key_values`).
"""
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if self.config.is_decoder:
use_cache = use_cache if use_cache is not None else self.config.use_cache
else:
use_cache = False
if input_ids is not None and inputs_embeds is not None:
raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
elif input_ids is not None:
input_shape = input_ids.size()
elif inputs_embeds is not None:
input_shape = inputs_embeds.size()[:-1]
else:
raise ValueError("You have to specify either input_ids or inputs_embeds")
batch_size, seq_length = input_shape
device = input_ids.device if input_ids is not None else inputs_embeds.device
# past_key_values_length
past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0
if attention_mask is None:
attention_mask = torch.ones(((batch_size, seq_length + past_key_values_length)), device=device)
if token_type_ids is None:
token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=device)
# We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
# ourselves in which case we just need to make it broadcastable to all heads.
extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape, device)
# If a 2D or 3D attention mask is provided for the cross-attention
# we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length]
if self.config.is_decoder and encoder_hidden_states is not None:
encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size()
encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length)
if encoder_attention_mask is None:
encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device)
encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)
else:
encoder_extended_attention_mask = None
# Prepare head mask if needed
# 1.0 in head_mask indicate we keep the head
# attention_probs has shape bsz x n_heads x N x N
# input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
# and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)
embedding_output = self.embeddings(
input_ids=input_ids,
position_ids=position_ids,
token_type_ids=token_type_ids,
inputs_embeds=inputs_embeds,
past_key_values_length=past_key_values_length,
)
encoder_outputs = self.encoder(
embedding_output,
attention_mask=extended_attention_mask,
head_mask=head_mask,
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_extended_attention_mask,
past_key_values=past_key_values,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
sequence_output = encoder_outputs[0]
if not return_dict:
return (sequence_output,) + encoder_outputs[1:]
return BaseModelOutputWithPastAndCrossAttentions(
last_hidden_state=sequence_output,
past_key_values=encoder_outputs.past_key_values,
hidden_states=encoder_outputs.hidden_states,
attentions=encoder_outputs.attentions,
cross_attentions=encoder_outputs.cross_attentions,
)
class SplinterFullyConnectedLayer(nn.Module):
def __init__(self, input_dim, output_dim, hidden_act="gelu"):
super().__init__()
self.input_dim = input_dim
self.output_dim = output_dim
self.dense = nn.Linear(self.input_dim, self.output_dim)
self.act_fn = ACT2FN[hidden_act]
self.LayerNorm = nn.LayerNorm(self.output_dim)
def forward(self, inputs: torch.Tensor) -> torch.Tensor:
hidden_states = self.dense(inputs)
hidden_states = self.act_fn(hidden_states)
hidden_states = self.LayerNorm(hidden_states)
return hidden_states
class QuestionAwareSpanSelectionHead(nn.Module):
"""
Implementation of Question-Aware Span Selection (QASS) head, described in Splinter's paper:
"""
def __init__(self, config):
super().__init__()
self.query_start_transform = SplinterFullyConnectedLayer(config.hidden_size, config.hidden_size)
self.query_end_transform = SplinterFullyConnectedLayer(config.hidden_size, config.hidden_size)
self.start_transform = SplinterFullyConnectedLayer(config.hidden_size, config.hidden_size)
self.end_transform = SplinterFullyConnectedLayer(config.hidden_size, config.hidden_size)
self.start_classifier = nn.Linear(config.hidden_size, config.hidden_size, bias=False)
self.end_classifier = nn.Linear(config.hidden_size, config.hidden_size, bias=False)
def forward(self, inputs, positions):
_, _, dim = inputs.size()
index = positions.unsqueeze(-1).repeat(1, 1, dim) # [batch_size, num_positions, dim]
gathered_reps = torch.gather(inputs, dim=1, index=index) # [batch_size, num_positions, dim]
query_start_reps = self.query_start_transform(gathered_reps) # [batch_size, num_positions, dim]
query_end_reps = self.query_end_transform(gathered_reps) # [batch_size, num_positions, dim]
start_reps = self.start_transform(inputs) # [batch_size, seq_length, dim]
end_reps = self.end_transform(inputs) # [batch_size, seq_length, dim]
hidden_states = self.start_classifier(query_start_reps) # [batch_size, num_positions, dim]
start_reps = start_reps.permute(0, 2, 1) # [batch_size, dim, seq_length]
start_logits = torch.matmul(hidden_states, start_reps)
hidden_states = self.end_classifier(query_end_reps)
end_reps = end_reps.permute(0, 2, 1)
end_logits = torch.matmul(hidden_states, end_reps)
return start_logits, end_logits
@add_start_docstrings(
"""
Splinter Model with a span classification head on top for extractive question-answering tasks like SQuAD (a linear
layers on top of the hidden-states output to compute `span start logits` and `span end logits`).
""",
SPLINTER_START_DOCSTRING,
)
class SplinterForQuestionAnswering(SplinterPreTrainedModel):
def __init__(self, config):
super().__init__(config)
self.splinter = SplinterModel(config)
self.splinter_qass = QuestionAwareSpanSelectionHead(config)
self.question_token_id = config.question_token_id
# Initialize weights and apply final processing
self.post_init()
@add_start_docstrings_to_model_forward(SPLINTER_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
@add_code_sample_docstrings(
processor_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=QuestionAnsweringModelOutput,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids: Optional[torch.Tensor] = None,
attention_mask: Optional[torch.Tensor] = None,
token_type_ids: Optional[torch.Tensor] = None,
position_ids: Optional[torch.Tensor] = None,
head_mask: Optional[torch.Tensor] = None,
inputs_embeds: Optional[torch.Tensor] = None,
start_positions: Optional[torch.LongTensor] = None,
end_positions: Optional[torch.LongTensor] = None,
output_attentions: Optional[bool] = None,
output_hidden_states: Optional[bool] = None,
return_dict: Optional[bool] = None,
question_positions: Optional[torch.LongTensor] = None,
) -> Union[Tuple, QuestionAnsweringModelOutput]:
r"""
start_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
Labels for position (index) of the start of the labelled span for computing the token classification loss.
Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence
are not taken into account for computing the loss.
end_positions (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
Labels for position (index) of the end of the labelled span for computing the token classification loss.
Positions are clamped to the length of the sequence (`sequence_length`). Position outside of the sequence
are not taken into account for computing the loss.
question_positions (`torch.LongTensor` of shape `(batch_size, num_questions)`, *optional*):
The positions of all question tokens. If given, start_logits and end_logits will be of shape `(batch_size,
num_questions, sequence_length)`. If None, the first question token in each sequence in the batch will be
the only one for which start_logits and end_logits are calculated and they will be of shape `(batch_size,
sequence_length)`.
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
question_positions_were_none = False
if question_positions is None:
if input_ids is not None:
question_position_for_each_example = torch.argmax(
(torch.eq(input_ids, self.question_token_id)).int(), dim=-1
)
else:
question_position_for_each_example = torch.zeros(
inputs_embeds.size(0), dtype=torch.long, layout=inputs_embeds.layout, device=inputs_embeds.device
)
question_positions = question_position_for_each_example.unsqueeze(-1)
question_positions_were_none = True
outputs = self.splinter(
input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
sequence_output = outputs[0]
start_logits, end_logits = self.splinter_qass(sequence_output, question_positions)
if question_positions_were_none:
start_logits, end_logits = start_logits.squeeze(1), end_logits.squeeze(1)
if attention_mask is not None:
start_logits = start_logits + (1 - attention_mask) * -10000.0
end_logits = end_logits + (1 - attention_mask) * -10000.0
total_loss = None
if start_positions is not None and end_positions is not None:
# If we are on multi-GPU, split add a dimension
if len(start_positions.size()) > 1:
start_positions = start_positions.squeeze(-1)
if len(end_positions.size()) > 1:
end_positions = end_positions.squeeze(-1)
# sometimes the start/end positions are outside our model inputs, we ignore these terms
ignored_index = start_logits.size(1)
start_positions.clamp_(0, ignored_index)
end_positions.clamp_(0, ignored_index)
loss_fct = CrossEntropyLoss(ignore_index=ignored_index)
start_loss = loss_fct(start_logits, start_positions)
end_loss = loss_fct(end_logits, end_positions)
total_loss = (start_loss + end_loss) / 2
if not return_dict:
output = (start_logits, end_logits) + outputs[1:]
return ((total_loss,) + output) if total_loss is not None else output
return QuestionAnsweringModelOutput(
loss=total_loss,
start_logits=start_logits,
end_logits=end_logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
| [
"noreply@github.com"
] | stas00.noreply@github.com |
d0017561c6f6ad25a1c34c2406c04f4c6a962b52 | 19380415ccdcb0dac20f7bd67fcc8a0f631a3b90 | /codeforces/727A.py | 8ed364f5f8143382bfbf97c6d20628a693a1495f | [
"MIT"
] | permissive | italo-batista/problems-solving | c06c811364db7439d842db76e743dd7a1a7c8365 | f83ad34f0abebd52925c4020635556f20743ba06 | refs/heads/master | 2021-10-28T07:01:21.643218 | 2019-04-22T15:27:19 | 2019-04-22T15:27:19 | 76,066,780 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 910 | py | # LINK FOR PROBLEM: http://codeforces.com/problemset/problem/727/A
def ends_with_one(number):
number = str(number)
return number[-1] == "1"
def del_last_one(number):
number = str(number)
t = len(number)
number = int( number[:t-1] )
return number
def isEven(number):
return (number % 2 == 0)
a, b = map(int, raw_input().split())
fila = [str(b)]
current_number = b
while current_number > a:
if ends_with_one(current_number):
current_number = del_last_one(current_number)
fila.append(str(current_number))
elif isEven(current_number):
current_number = current_number / 2
fila.append(str(current_number))
else:
current_number = a-1
break
if current_number < a:
print "NO"
elif current_number == a:
print "YES"
print len(fila)
print " ".join(str(fila[i]) for i in range(len(fila)-1, -1, -1 ))
| [
"italo.batista@ccc.ufcg.edu.br"
] | italo.batista@ccc.ufcg.edu.br |
7b1fbc45105a4352caa4fbb01235b2f5dcdfa5b7 | a3e52fbdfc81da3d17fee3d11b4451b330bfd592 | /CompPython/tutoriais/bloghackerearth/numpyTutorial.py | 5b484ed95488bf81857c0275ab3b14db7745d0cb | [] | no_license | chrislucas/python | 79633915dd0aa8724ae3dfc5a3a32053f7a4f1e0 | d3cca374f87e134a7ddfc327a6daea983875ecac | refs/heads/master | 2021-01-17T04:08:25.056580 | 2016-12-26T11:41:31 | 2016-12-26T11:41:31 | 42,319,868 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 724 | py | '''
Created on 25 de nov de 2016
@author: C.Lucas
'''
if __name__ == '__main__':
pass
'''
http://blog.hackerearth.com/prerequisites-linear-algebra-machine-learning
'''
import numpy as np
from numpy import abs, array, eye
def test_numpy_abs():
print(abs([-1.2, 1.2]))
def test_numpy_array():
matrix = array([[1,2,3],[3,2,1]])
print(*matrix)
print(matrix.shape)
def identity_matrix(n):
print(eye(n))
def add_matrix():
a = array([ [1,2,3],[3,2,1] ])
b = array([ [1,2,3],[3,2,1] ])
return np.add(a, b)
#print(add_matrix())
'''
http://cs231n.github.io/python-numpy-tutorial/
'''
print(np.transpose(add_matrix()))
print(np.ones((1, 2, 3)))
#print(np.arange(4).reshape((2,2))) | [
"christoffer.luccas@gmail.com"
] | christoffer.luccas@gmail.com |
48cbda302d0e9345f8c476a9d0d78fc6254bd7e4 | 3ac0a169aa2a123e164f7434281bc9dd6373d341 | /sortedArrayToBST.py | 590e2e06c1ece805f11ef7d6d7957e2f01800c4c | [] | no_license | sfeng77/myleetcode | 02a028b5ca5a0354e99b8fb758883902a768f410 | a2841fdb624548fdc6ef430e23ca46f3300e0558 | refs/heads/master | 2021-01-23T02:06:37.569936 | 2017-04-21T20:31:06 | 2017-04-21T20:31:06 | 85,967,955 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 509 | py | class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution(object):
def sortedArrayToBST(self, nums):
"""
:type nums: List[int]
:rtype: TreeNode
"""
n = len(nums)
if n == 0:
return None
m = n / 2
node = TreeNode(nums[m])
node.left = self.sortedArrayToBST(nums[:m])
node.right = self.sortedArrayToBST(nums[m+1:])
return node
| [
"sfeng77@gmail.com"
] | sfeng77@gmail.com |
9b7ca51648662bc507359f911a2b0c1d348fd579 | 09c39de5aad7b283cfac2f09a2b93e43086846d2 | /Unit 07 Lists and Functions/01 Lists and Functions/Function Recap/6-More than one Argument.py | 5bd98430a7fc7cfb13218ce3b0036e91af532544 | [
"MIT"
] | permissive | lpython2006e/python-samples | b4e84080259faf75b41fb2fd4fb9d2fbc9f857aa | b94ba67ce0d7798ecf796dadae206aa75da58301 | refs/heads/master | 2023-01-21T13:16:13.295163 | 2020-11-29T11:01:50 | 2020-11-29T11:01:50 | 278,653,779 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 109 | py | m = 5
n = 13
# Add add_function here!
def add_function(x, y):
return x + y
print(add_function(m, n))
| [
"lent@hivetech.vn"
] | lent@hivetech.vn |
663d9e55f574ee82bb6d9ecf79a6f7ee71df9a65 | f06d9cd5fb86885a73ee997c687f3294840dd199 | /setuser.py | 169f54a1c0f7ac711cdde780e1f9ecd13d856b49 | [] | no_license | bu2/oauth-proxy | aaff16a07d5c2c07c8243293c9ed41205b251a74 | dbed492f8a806c36177a56ca626f005acec904b1 | refs/heads/master | 2020-12-26T15:53:40.618570 | 2013-07-09T05:06:16 | 2013-07-09T05:06:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,376 | py | import getpass
import os
import sys
from werkzeug.datastructures import MultiDict
import models
import forms
# Make sure the database gets installed properly
models.db.create_all()
values = MultiDict()
form = forms.SetUser(values)
values['email'] = sys.argv[1] if len(sys.argv) > 1 else raw_input('%s: ' % form.email.label.text)
form = forms.SetUser(values)
form.validate()
if form.email.errors:
sys.exit('\n'.join(' ! %s' % e for e in form.email.errors))
if models.User.query.filter_by(email=form.email.data).count():
print '%s already exists, setting the password' % form.email.data
values['password'] = getpass.getpass('%s: ' % form.password.label.text)
form = forms.SetUser(values)
form.validate()
if form.password.errors:
sys.exit('\n'.join(' ! %s' % e for e in form.password.errors))
values['retype'] = getpass.getpass('%s: ' % form.retype.label.text)
form = forms.SetUser(values)
form.validate()
if form.retype.errors:
sys.exit('\n'.join(' ! %s' % e for e in form.retype.errors))
user = models.User.query.filter_by(email=form.email.data).first()
if user:
user.set_password(form.password.data)
msg = 'Updated password for %s' % user.email
else:
user = models.User(email=form.email.data, password=form.password.data)
msg = 'Created account for %s' % user.email
models.db.session.add(user)
models.db.session.commit()
print msg
| [
"marty@martyalchin.com"
] | marty@martyalchin.com |
f5791441ecd21aa9610b5a7fe8438953b39bd286 | eaf25a735c2e8fefcf5a84786bf2b6015ed05793 | /sdk/sql/azure-mgmt-sql/azure/mgmt/sql/models/_models_py3.py | d115113635ee20f04057ec8cc3422ffe14497b6a | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | KieranBrantnerMagee/azure-sdk-for-python | cd7871a38b67b63fa32ccadbdd26cffa3a16cec3 | d4f33ed6cc9686b7ef751eb5d727ae6ec6ee1253 | refs/heads/master | 2021-06-22T01:55:33.669192 | 2019-10-02T19:14:39 | 2019-10-02T19:14:39 | 210,954,237 | 0 | 0 | MIT | 2020-03-13T01:27:44 | 2019-09-25T22:47:44 | Python | UTF-8 | Python | false | false | 383,369 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AutomaticTuningOptions(Model):
    """Automatic tuning settings for a single advisor.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param desired_state: The requested state for this automatic tuning
     option. Possible values include: 'Off', 'On', 'Default'
    :type desired_state: str or
     ~azure.mgmt.sql.models.AutomaticTuningOptionModeDesired
    :ivar actual_state: The state currently in effect for this option.
     Possible values include: 'Off', 'On'
    :vartype actual_state: str or
     ~azure.mgmt.sql.models.AutomaticTuningOptionModeActual
    :ivar reason_code: Reason code reported when the desired and actual
     states differ.
    :vartype reason_code: int
    :ivar reason_desc: Reason description reported when the desired and
     actual states differ. Possible values include: 'Default', 'Disabled',
     'AutoConfigured', 'InheritedFromServer', 'QueryStoreOff',
     'QueryStoreReadOnly', 'NotSupported'
    :vartype reason_desc: str or
     ~azure.mgmt.sql.models.AutomaticTuningDisabledReason
    """

    # Fields populated by the service are read-only and are never
    # serialized into outgoing requests.
    _validation = {
        'actual_state': {'readonly': True},
        'reason_code': {'readonly': True},
        'reason_desc': {'readonly': True},
    }

    # Maps Python attribute names to wire (JSON) keys and msrest types.
    _attribute_map = {
        'desired_state': {'key': 'desiredState', 'type': 'AutomaticTuningOptionModeDesired'},
        'actual_state': {'key': 'actualState', 'type': 'AutomaticTuningOptionModeActual'},
        'reason_code': {'key': 'reasonCode', 'type': 'int'},
        'reason_desc': {'key': 'reasonDesc', 'type': 'AutomaticTuningDisabledReason'},
    }

    def __init__(self, *, desired_state=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.desired_state = desired_state
        # Read-only attributes start unset; the service fills them in on
        # responses.
        self.actual_state = None
        self.reason_code = None
        self.reason_desc = None
class AutomaticTuningServerOptions(Model):
    """Server-level automatic tuning settings for one individual advisor.

    Only ``desired_state`` is writable; the remaining members are populated
    by the server and are ignored when sent in a request.

    :param desired_state: Requested state for the tuning option. Possible
     values include: 'Off', 'On', 'Default'
    :type desired_state: str or
     ~azure.mgmt.sql.models.AutomaticTuningOptionModeDesired
    :ivar actual_state: State currently in effect on the server. Possible
     values include: 'Off', 'On'
    :vartype actual_state: str or
     ~azure.mgmt.sql.models.AutomaticTuningOptionModeActual
    :ivar reason_code: Reason code when desired and actual state differ.
    :vartype reason_code: int
    :ivar reason_desc: Reason description when desired and actual state
     differ. Possible values include: 'Default', 'Disabled',
     'AutoConfigured'
    :vartype reason_desc: str or
     ~azure.mgmt.sql.models.AutomaticTuningServerReason
    """

    # Server-populated members must never be serialized into a request.
    _validation = {
        'actual_state': {'readonly': True},
        'reason_code': {'readonly': True},
        'reason_desc': {'readonly': True},
    }

    _attribute_map = {
        'desired_state': {'key': 'desiredState', 'type': 'AutomaticTuningOptionModeDesired'},
        'actual_state': {'key': 'actualState', 'type': 'AutomaticTuningOptionModeActual'},
        'reason_code': {'key': 'reasonCode', 'type': 'int'},
        'reason_desc': {'key': 'reasonDesc', 'type': 'AutomaticTuningServerReason'},
    }

    def __init__(self, *, desired_state=None, **kwargs) -> None:
        super(AutomaticTuningServerOptions, self).__init__(**kwargs)
        # Read-only members start empty; the service fills them in.
        self.actual_state = None
        self.reason_code = None
        self.reason_desc = None
        self.desired_state = desired_state
class Resource(Model):
    """Base ARM resource.

    Every member is populated by the server and is ignored when sent in a
    request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    """

    # All three identity members are assigned by ARM, never by the client.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super(Resource, self).__init__(**kwargs)
        # Identity members are filled in by the service on deserialization.
        self.type = None
        self.name = None
        self.id = None
class ProxyResource(Resource):
    """ARM proxy resource: a resource with identity but no location or tags.

    Every member is populated by the server and is ignored when sent in a
    request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        # Adds nothing over Resource; exists as a distinct ARM resource kind.
        super(ProxyResource, self).__init__(**kwargs)
class BackupLongTermRetentionPolicy(ProxyResource):
    """A long term retention (LTR) backup policy.

    ``id``, ``name`` and ``type`` are populated by the server and are
    ignored when sent in a request. Retention periods are expressed as
    ISO 8601 durations.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param weekly_retention: The weekly retention policy for an LTR backup
     in an ISO 8601 format.
    :type weekly_retention: str
    :param monthly_retention: The monthly retention policy for an LTR
     backup in an ISO 8601 format.
    :type monthly_retention: str
    :param yearly_retention: The yearly retention policy for an LTR backup
     in an ISO 8601 format.
    :type yearly_retention: str
    :param week_of_year: The week of year to take the yearly backup in an
     ISO 8601 format.
    :type week_of_year: int
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    # Policy members live under the ARM "properties" envelope on the wire.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'weekly_retention': {'key': 'properties.weeklyRetention', 'type': 'str'},
        'monthly_retention': {'key': 'properties.monthlyRetention', 'type': 'str'},
        'yearly_retention': {'key': 'properties.yearlyRetention', 'type': 'str'},
        'week_of_year': {'key': 'properties.weekOfYear', 'type': 'int'},
    }

    def __init__(self, *, weekly_retention: str=None, monthly_retention: str=None, yearly_retention: str=None, week_of_year: int=None, **kwargs) -> None:
        super(BackupLongTermRetentionPolicy, self).__init__(**kwargs)
        self.week_of_year = week_of_year
        self.yearly_retention = yearly_retention
        self.monthly_retention = monthly_retention
        self.weekly_retention = weekly_retention
class BackupShortTermRetentionPolicy(ProxyResource):
    """A short term retention policy.

    ``id``, ``name`` and ``type`` are populated by the server and are
    ignored when sent in a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param retention_days: The backup retention period in days. This is how
     many days Point-in-Time Restore will be supported.
    :type retention_days: int
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'retention_days': {'key': 'properties.retentionDays', 'type': 'int'},
    }

    def __init__(self, *, retention_days: int=None, **kwargs) -> None:
        # The only writable member; identity fields come from ProxyResource.
        super(BackupShortTermRetentionPolicy, self).__init__(**kwargs)
        self.retention_days = retention_days
class CheckNameAvailabilityRequest(Model):
    """Request payload asking whether a given resource name is available.

    ``type`` is a constant fixed to the server resource type and cannot be
    overridden by callers.

    All required parameters must be populated in order to send to Azure.

    :param name: Required. The name whose availability is to be checked.
    :type name: str
    :ivar type: Required. The type of resource that is used as the scope of
     the availability check. Default value: "Microsoft.Sql/servers" .
    :vartype type: str
    """

    _validation = {
        'name': {'required': True},
        'type': {'required': True, 'constant': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    # Constant: availability checks are always scoped to SQL servers.
    type = "Microsoft.Sql/servers"

    def __init__(self, *, name: str, **kwargs) -> None:
        super(CheckNameAvailabilityRequest, self).__init__(**kwargs)
        self.name = name
class CheckNameAvailabilityResponse(Model):
    """Response indicating whether a requested resource name is available.

    Every member is populated by the server and is ignored when sent in a
    request.

    :ivar available: True if the name is available, otherwise false.
    :vartype available: bool
    :ivar message: A message explaining why the name is unavailable. Will
     be null if the name is available.
    :vartype message: str
    :ivar name: The name whose availability was checked.
    :vartype name: str
    :ivar reason: The reason code explaining why the name is unavailable.
     Will be null if the name is available. Possible values include:
     'Invalid', 'AlreadyExists'
    :vartype reason: str or ~azure.mgmt.sql.models.CheckNameAvailabilityReason
    """

    # Response-only model: every member is read-only.
    _validation = {
        'available': {'readonly': True},
        'message': {'readonly': True},
        'name': {'readonly': True},
        'reason': {'readonly': True},
    }

    _attribute_map = {
        'available': {'key': 'available', 'type': 'bool'},
        'message': {'key': 'message', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'reason': {'key': 'reason', 'type': 'CheckNameAvailabilityReason'},
    }

    def __init__(self, **kwargs) -> None:
        super(CheckNameAvailabilityResponse, self).__init__(**kwargs)
        self.reason = None
        self.name = None
        self.message = None
        self.available = None
class CloudError(Model):
    """Generic error response envelope returned by the service.

    Carries no serializable members of its own.
    """

    _attribute_map = {}
class CompleteDatabaseRestoreDefinition(Model):
    """Parameters needed to complete a database restore operation.

    All required parameters must be populated in order to send to Azure.

    :param last_backup_name: Required. The last backup name to apply
    :type last_backup_name: str
    """

    _validation = {
        'last_backup_name': {'required': True},
    }

    _attribute_map = {
        'last_backup_name': {'key': 'lastBackupName', 'type': 'str'},
    }

    def __init__(self, *, last_backup_name: str, **kwargs) -> None:
        super(CompleteDatabaseRestoreDefinition, self).__init__(**kwargs)
        self.last_backup_name = last_backup_name
class CreateDatabaseRestorePointDefinition(Model):
    """Parameters needed to create a database restore point.

    All required parameters must be populated in order to send to Azure.

    :param restore_point_label: Required. The restore point label to apply
    :type restore_point_label: str
    """

    _validation = {
        'restore_point_label': {'required': True},
    }

    _attribute_map = {
        'restore_point_label': {'key': 'restorePointLabel', 'type': 'str'},
    }

    def __init__(self, *, restore_point_label: str, **kwargs) -> None:
        super(CreateDatabaseRestorePointDefinition, self).__init__(**kwargs)
        self.restore_point_label = restore_point_label
class TrackedResource(Resource):
    """An ARM tracked top-level resource: identity plus location and tags.

    ``id``, ``name`` and ``type`` are populated by the server and are
    ignored when sent in a request; ``location`` is mandatory.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: Required. Resource location.
    :type location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, *, location: str, tags=None, **kwargs) -> None:
        super(TrackedResource, self).__init__(**kwargs)
        self.tags = tags
        self.location = location
class Database(TrackedResource):
    """A database resource.

    Members marked as read-only (kind, managed_by, status, database_id,
    creation_date, current_service_objective_name,
    requested_service_objective_name, default_secondary_location,
    failover_group_id, max_log_size_bytes, earliest_restore_date,
    current_sku, paused_date, resumed_date) are populated by the server and
    ignored when sent in a request.

    All required parameters must be populated in order to send to Azure.

    :param location: Required. Resource location.
    :type location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param sku: The database SKU. The list of SKUs may vary by region and
     support offer; discover what is available to your subscription with the
     `Capabilities_ListByLocation` REST API,
     ``az sql db list-editions -l <location> -o table``, or
     ``Get-AzSqlServerServiceObjective -Location <location>``.
    :type sku: ~azure.mgmt.sql.models.Sku
    :param create_mode: Specifies the mode of database creation. Possible
     values include: 'Default', 'Copy', 'Secondary', 'PointInTimeRestore',
     'Restore', 'Recovery', 'RestoreExternalBackup',
     'RestoreExternalBackupSecondary', 'RestoreLongTermRetentionBackup',
     'OnlineSecondary'. Depending on the mode, ``source_database_id``,
     ``restore_point_in_time``, ``source_database_deletion_date`` or
     ``recovery_services_recovery_point_id`` must also be supplied. Copy,
     Secondary, and RestoreLongTermRetentionBackup are not supported for the
     DataWarehouse edition.
    :type create_mode: str or ~azure.mgmt.sql.models.CreateMode
    :param collation: The collation of the database.
    :type collation: str
    :param max_size_bytes: The max size of the database expressed in bytes.
    :type max_size_bytes: long
    :param sample_name: The name of the sample schema to apply when creating
     this database. Possible values include: 'AdventureWorksLT',
     'WideWorldImportersStd', 'WideWorldImportersFull'
    :type sample_name: str or ~azure.mgmt.sql.models.SampleName
    :param elastic_pool_id: The resource identifier of the elastic pool
     containing this database.
    :type elastic_pool_id: str
    :param source_database_id: The resource identifier of the source
     database associated with the create operation of this database.
    :type source_database_id: str
    :param restore_point_in_time: The point in time (ISO8601 format) of the
     source database that will be restored to create the new database.
    :type restore_point_in_time: datetime
    :param source_database_deletion_date: The time that the source database
     was deleted.
    :type source_database_deletion_date: datetime
    :param recovery_services_recovery_point_id: The resource identifier of
     the recovery point associated with the create operation.
    :type recovery_services_recovery_point_id: str
    :param long_term_retention_backup_resource_id: The resource identifier
     of the long term retention backup associated with the create operation.
    :type long_term_retention_backup_resource_id: str
    :param recoverable_database_id: The resource identifier of the
     recoverable database associated with the create operation.
    :type recoverable_database_id: str
    :param restorable_dropped_database_id: The resource identifier of the
     restorable dropped database associated with the create operation.
    :type restorable_dropped_database_id: str
    :param catalog_collation: Collation of the metadata catalog. Possible
     values include: 'DATABASE_DEFAULT', 'SQL_Latin1_General_CP1_CI_AS'
    :type catalog_collation: str or
     ~azure.mgmt.sql.models.CatalogCollationType
    :param zone_redundant: Whether or not this database is zone redundant
     (replicas spread across multiple availability zones).
    :type zone_redundant: bool
    :param license_type: The license type to apply for this database.
     Possible values include: 'LicenseIncluded', 'BasePrice'
    :type license_type: str or ~azure.mgmt.sql.models.DatabaseLicenseType
    :param read_scale: If enabled, readonly-intent connections may be routed
     to a readonly secondary replica (Premium and Business Critical only).
     Possible values include: 'Enabled', 'Disabled'
    :type read_scale: str or ~azure.mgmt.sql.models.DatabaseReadScale
    :param read_replica_count: The number of readonly secondary replicas
     (Hyperscale edition only).
    :type read_replica_count: int
    :param auto_pause_delay: Time in minutes after which the database is
     automatically paused. A value of -1 disables automatic pause.
    :type auto_pause_delay: int
    :param min_capacity: Minimal capacity that the database will always have
     allocated, if not paused.
    :type min_capacity: float
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
        'kind': {'readonly': True},
        'managed_by': {'readonly': True},
        'status': {'readonly': True},
        'database_id': {'readonly': True},
        'creation_date': {'readonly': True},
        'current_service_objective_name': {'readonly': True},
        'requested_service_objective_name': {'readonly': True},
        'default_secondary_location': {'readonly': True},
        'failover_group_id': {'readonly': True},
        'max_log_size_bytes': {'readonly': True},
        'earliest_restore_date': {'readonly': True},
        'current_sku': {'readonly': True},
        'paused_date': {'readonly': True},
        'resumed_date': {'readonly': True},
    }

    # Database-specific members live under the ARM "properties" envelope.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'sku': {'key': 'sku', 'type': 'Sku'},
        'kind': {'key': 'kind', 'type': 'str'},
        'managed_by': {'key': 'managedBy', 'type': 'str'},
        'create_mode': {'key': 'properties.createMode', 'type': 'str'},
        'collation': {'key': 'properties.collation', 'type': 'str'},
        'max_size_bytes': {'key': 'properties.maxSizeBytes', 'type': 'long'},
        'sample_name': {'key': 'properties.sampleName', 'type': 'str'},
        'elastic_pool_id': {'key': 'properties.elasticPoolId', 'type': 'str'},
        'source_database_id': {'key': 'properties.sourceDatabaseId', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'str'},
        'database_id': {'key': 'properties.databaseId', 'type': 'str'},
        'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'},
        'current_service_objective_name': {'key': 'properties.currentServiceObjectiveName', 'type': 'str'},
        'requested_service_objective_name': {'key': 'properties.requestedServiceObjectiveName', 'type': 'str'},
        'default_secondary_location': {'key': 'properties.defaultSecondaryLocation', 'type': 'str'},
        'failover_group_id': {'key': 'properties.failoverGroupId', 'type': 'str'},
        'restore_point_in_time': {'key': 'properties.restorePointInTime', 'type': 'iso-8601'},
        'source_database_deletion_date': {'key': 'properties.sourceDatabaseDeletionDate', 'type': 'iso-8601'},
        'recovery_services_recovery_point_id': {'key': 'properties.recoveryServicesRecoveryPointId', 'type': 'str'},
        'long_term_retention_backup_resource_id': {'key': 'properties.longTermRetentionBackupResourceId', 'type': 'str'},
        'recoverable_database_id': {'key': 'properties.recoverableDatabaseId', 'type': 'str'},
        'restorable_dropped_database_id': {'key': 'properties.restorableDroppedDatabaseId', 'type': 'str'},
        'catalog_collation': {'key': 'properties.catalogCollation', 'type': 'str'},
        'zone_redundant': {'key': 'properties.zoneRedundant', 'type': 'bool'},
        'license_type': {'key': 'properties.licenseType', 'type': 'str'},
        'max_log_size_bytes': {'key': 'properties.maxLogSizeBytes', 'type': 'long'},
        'earliest_restore_date': {'key': 'properties.earliestRestoreDate', 'type': 'iso-8601'},
        'read_scale': {'key': 'properties.readScale', 'type': 'str'},
        'read_replica_count': {'key': 'properties.readReplicaCount', 'type': 'int'},
        'current_sku': {'key': 'properties.currentSku', 'type': 'Sku'},
        'auto_pause_delay': {'key': 'properties.autoPauseDelay', 'type': 'int'},
        'min_capacity': {'key': 'properties.minCapacity', 'type': 'float'},
        'paused_date': {'key': 'properties.pausedDate', 'type': 'iso-8601'},
        'resumed_date': {'key': 'properties.resumedDate', 'type': 'iso-8601'},
    }

    def __init__(self, *, location: str, tags=None, sku=None, create_mode=None, collation: str=None, max_size_bytes: int=None, sample_name=None, elastic_pool_id: str=None, source_database_id: str=None, restore_point_in_time=None, source_database_deletion_date=None, recovery_services_recovery_point_id: str=None, long_term_retention_backup_resource_id: str=None, recoverable_database_id: str=None, restorable_dropped_database_id: str=None, catalog_collation=None, zone_redundant: bool=None, license_type=None, read_scale=None, read_replica_count: int=None, auto_pause_delay: int=None, min_capacity: float=None, **kwargs) -> None:
        super(Database, self).__init__(location=location, tags=tags, **kwargs)
        # Server-populated (read-only) members start empty.
        self.kind = None
        self.managed_by = None
        self.status = None
        self.database_id = None
        self.creation_date = None
        self.current_service_objective_name = None
        self.requested_service_objective_name = None
        self.default_secondary_location = None
        self.failover_group_id = None
        self.max_log_size_bytes = None
        self.earliest_restore_date = None
        self.current_sku = None
        self.paused_date = None
        self.resumed_date = None
        # Caller-supplied members.
        self.sku = sku
        self.create_mode = create_mode
        self.collation = collation
        self.max_size_bytes = max_size_bytes
        self.sample_name = sample_name
        self.elastic_pool_id = elastic_pool_id
        self.source_database_id = source_database_id
        self.restore_point_in_time = restore_point_in_time
        self.source_database_deletion_date = source_database_deletion_date
        self.recovery_services_recovery_point_id = recovery_services_recovery_point_id
        self.long_term_retention_backup_resource_id = long_term_retention_backup_resource_id
        self.recoverable_database_id = recoverable_database_id
        self.restorable_dropped_database_id = restorable_dropped_database_id
        self.catalog_collation = catalog_collation
        self.zone_redundant = zone_redundant
        self.license_type = license_type
        self.read_scale = read_scale
        self.read_replica_count = read_replica_count
        self.auto_pause_delay = auto_pause_delay
        self.min_capacity = min_capacity
class DatabaseAutomaticTuning(ProxyResource):
    """Database-level automatic tuning configuration.

    ``id``, ``name``, ``type`` and ``actual_state`` are populated by the
    server and are ignored when sent in a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param desired_state: Automatic tuning desired state. Possible values
     include: 'Inherit', 'Custom', 'Auto', 'Unspecified'
    :type desired_state: str or ~azure.mgmt.sql.models.AutomaticTuningMode
    :ivar actual_state: Automatic tuning actual state. Possible values
     include: 'Inherit', 'Custom', 'Auto', 'Unspecified'
    :vartype actual_state: str or ~azure.mgmt.sql.models.AutomaticTuningMode
    :param options: Per-option automatic tuning settings, keyed by option
     name.
    :type options: dict[str, ~azure.mgmt.sql.models.AutomaticTuningOptions]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'actual_state': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'desired_state': {'key': 'properties.desiredState', 'type': 'AutomaticTuningMode'},
        'actual_state': {'key': 'properties.actualState', 'type': 'AutomaticTuningMode'},
        'options': {'key': 'properties.options', 'type': '{AutomaticTuningOptions}'},
    }

    def __init__(self, *, desired_state=None, options=None, **kwargs) -> None:
        super(DatabaseAutomaticTuning, self).__init__(**kwargs)
        # actual_state is read-only; the service reports what is in effect.
        self.actual_state = None
        self.options = options
        self.desired_state = desired_state
class DatabaseBlobAuditingPolicy(ProxyResource):
    """A database blob auditing policy.

    ``id``, ``name``, ``type`` and ``kind`` are populated by the server and
    are ignored when sent in a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar kind: Resource kind.
    :vartype kind: str
    :param state: Required. Specifies the state of the policy. If state is
     Enabled, storageEndpoint or isAzureMonitorTargetEnabled are required.
     Possible values include: 'Enabled', 'Disabled'
    :type state: str or ~azure.mgmt.sql.models.BlobAuditingPolicyState
    :param storage_endpoint: Specifies the blob storage endpoint (e.g.
     https://MyAccount.blob.core.windows.net). If state is Enabled,
     storageEndpoint is required.
    :type storage_endpoint: str
    :param storage_account_access_key: Specifies the identifier key of the
     auditing storage account. If state is Enabled and storageEndpoint is
     specified, storageAccountAccessKey is required.
    :type storage_account_access_key: str
    :param retention_days: Specifies the number of days to keep in the
     audit logs in the storage account.
    :type retention_days: int
    :param audit_actions_and_groups: Specifies the Actions-Groups and
     Actions to audit. The recommended default set (also what the Azure
     portal configures) is BATCH_COMPLETED_GROUP,
     SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP and
     FAILED_DATABASE_AUTHENTICATION_GROUP, which audits all queries, stored
     procedures and logins. Individual actions take the form
     "{action} ON {object} BY {principal}", e.g.
     "SELECT on dbo.myTable by public" or
     "SELECT on DATABASE::myDatabase by public". For the full list of
     supported action groups and actions, see [Database-Level Audit Action
     Groups](https://docs.microsoft.com/en-us/sql/relational-databases/security/auditing/sql-server-audit-action-groups-and-actions#database-level-audit-action-groups)
     and [Database-Level Audit
     Actions](https://docs.microsoft.com/en-us/sql/relational-databases/security/auditing/sql-server-audit-action-groups-and-actions#database-level-audit-actions).
     Avoid combining the catch-all groups (BATCH_STARTED_GROUP /
     BATCH_COMPLETED_GROUP) with other groups, as that duplicates audit
     logs.
    :type audit_actions_and_groups: list[str]
    :param storage_account_subscription_id: Specifies the blob storage
     subscription Id.
    :type storage_account_subscription_id: str
    :param is_storage_secondary_key_in_use: Specifies whether
     storageAccountAccessKey value is the storage's secondary key.
    :type is_storage_secondary_key_in_use: bool
    :param is_azure_monitor_target_enabled: Specifies whether audit events
     are sent to Azure Monitor. To do so, set 'state' to 'Enabled' and
     'isAzureMonitorTargetEnabled' to true, and create Diagnostic Settings
     with the 'SQLSecurityAuditEvents' logs category on the database (use
     the 'master' database as {databaseName} for server-level audit). See
     the [Diagnostic Settings REST
     API](https://go.microsoft.com/fwlink/?linkid=2033207) or [Diagnostic
     Settings PowerShell](https://go.microsoft.com/fwlink/?linkid=2033043).
    :type is_azure_monitor_target_enabled: bool
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'kind': {'readonly': True},
        'state': {'required': True},
    }

    # Policy members live under the ARM "properties" envelope on the wire.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'BlobAuditingPolicyState'},
        'storage_endpoint': {'key': 'properties.storageEndpoint', 'type': 'str'},
        'storage_account_access_key': {'key': 'properties.storageAccountAccessKey', 'type': 'str'},
        'retention_days': {'key': 'properties.retentionDays', 'type': 'int'},
        'audit_actions_and_groups': {'key': 'properties.auditActionsAndGroups', 'type': '[str]'},
        'storage_account_subscription_id': {'key': 'properties.storageAccountSubscriptionId', 'type': 'str'},
        'is_storage_secondary_key_in_use': {'key': 'properties.isStorageSecondaryKeyInUse', 'type': 'bool'},
        'is_azure_monitor_target_enabled': {'key': 'properties.isAzureMonitorTargetEnabled', 'type': 'bool'},
    }

    def __init__(self, *, state, storage_endpoint: str=None, storage_account_access_key: str=None, retention_days: int=None, audit_actions_and_groups=None, storage_account_subscription_id: str=None, is_storage_secondary_key_in_use: bool=None, is_azure_monitor_target_enabled: bool=None, **kwargs) -> None:
        super(DatabaseBlobAuditingPolicy, self).__init__(**kwargs)
        # kind is read-only; the service populates it.
        self.kind = None
        self.state = state
        self.storage_endpoint = storage_endpoint
        self.storage_account_access_key = storage_account_access_key
        self.storage_account_subscription_id = storage_account_subscription_id
        self.is_storage_secondary_key_in_use = is_storage_secondary_key_in_use
        self.is_azure_monitor_target_enabled = is_azure_monitor_target_enabled
        self.retention_days = retention_days
        self.audit_actions_and_groups = audit_actions_and_groups
class DatabaseOperation(ProxyResource):
    """A database operation.

    Every property is populated by the server and is ignored when sending a
    request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar database_name: Name of the database the operation is performed on.
    :vartype database_name: str
    :ivar operation: Name of the operation.
    :vartype operation: str
    :ivar operation_friendly_name: Friendly name of the operation.
    :vartype operation_friendly_name: str
    :ivar percent_complete: Percentage of the operation completed.
    :vartype percent_complete: int
    :ivar server_name: Name of the server.
    :vartype server_name: str
    :ivar start_time: Operation start time.
    :vartype start_time: datetime
    :ivar state: Operation state. Possible values include: 'Pending',
     'InProgress', 'Succeeded', 'Failed', 'CancelInProgress', 'Cancelled'
    :vartype state: str or ~azure.mgmt.sql.models.ManagementOperationState
    :ivar error_code: Operation error code.
    :vartype error_code: int
    :ivar error_description: Operation error description.
    :vartype error_description: str
    :ivar error_severity: Operation error severity.
    :vartype error_severity: int
    :ivar is_user_error: Whether or not the error is a user error.
    :vartype is_user_error: bool
    :ivar estimated_completion_time: Estimated completion time of the
     operation.
    :vartype estimated_completion_time: datetime
    :ivar description: Operation description.
    :vartype description: str
    :ivar is_cancellable: Whether the operation can be cancelled.
    :vartype is_cancellable: bool
    """

    # Everything on this model is server-populated, hence read-only.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'database_name': {'readonly': True},
        'operation': {'readonly': True},
        'operation_friendly_name': {'readonly': True},
        'percent_complete': {'readonly': True},
        'server_name': {'readonly': True},
        'start_time': {'readonly': True},
        'state': {'readonly': True},
        'error_code': {'readonly': True},
        'error_description': {'readonly': True},
        'error_severity': {'readonly': True},
        'is_user_error': {'readonly': True},
        'estimated_completion_time': {'readonly': True},
        'description': {'readonly': True},
        'is_cancellable': {'readonly': True},
    }

    # Attribute-name -> wire-key/type mapping used by the msrest serializer.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'database_name': {'key': 'properties.databaseName', 'type': 'str'},
        'operation': {'key': 'properties.operation', 'type': 'str'},
        'operation_friendly_name': {'key': 'properties.operationFriendlyName', 'type': 'str'},
        'percent_complete': {'key': 'properties.percentComplete', 'type': 'int'},
        'server_name': {'key': 'properties.serverName', 'type': 'str'},
        'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
        'state': {'key': 'properties.state', 'type': 'str'},
        'error_code': {'key': 'properties.errorCode', 'type': 'int'},
        'error_description': {'key': 'properties.errorDescription', 'type': 'str'},
        'error_severity': {'key': 'properties.errorSeverity', 'type': 'int'},
        'is_user_error': {'key': 'properties.isUserError', 'type': 'bool'},
        'estimated_completion_time': {'key': 'properties.estimatedCompletionTime', 'type': 'iso-8601'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'is_cancellable': {'key': 'properties.isCancellable', 'type': 'bool'},
    }

    def __init__(self, **kwargs) -> None:
        super(DatabaseOperation, self).__init__(**kwargs)
        # All fields start as None; the deserializer fills them in from the
        # server response.
        self.database_name = None
        self.operation = None
        self.operation_friendly_name = None
        self.percent_complete = None
        self.server_name = None
        self.start_time = None
        self.state = None
        self.error_code = None
        self.error_description = None
        self.error_severity = None
        self.is_user_error = None
        self.estimated_completion_time = None
        self.description = None
        self.is_cancellable = None
class DatabaseSecurityAlertPolicy(ProxyResource):
    """A database Threat Detection policy.

    Read-only variables are populated by the server and ignored when sending
    a request. All required parameters must be populated in order to send to
    Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: The geo-location where the resource lives.
    :type location: str
    :ivar kind: Resource kind.
    :vartype kind: str
    :param state: Required. State of the policy ('New', 'Enabled',
     'Disabled'). If Enabled, storageEndpoint and storageAccountAccessKey are
     required.
    :type state: str or ~azure.mgmt.sql.models.SecurityAlertPolicyState
    :param disabled_alerts: Semicolon-separated list of alerts that are
     disabled, or an empty string to disable no alerts. Possible values:
     Sql_Injection; Sql_Injection_Vulnerability; Access_Anomaly;
     Data_Exfiltration; Unsafe_Action.
    :type disabled_alerts: str
    :param email_addresses: Semicolon-separated list of e-mail addresses to
     which the alert is sent.
    :type email_addresses: str
    :param email_account_admins: Whether the alert is sent to the account
     administrators ('Enabled', 'Disabled').
    :type email_account_admins: str or
     ~azure.mgmt.sql.models.SecurityAlertPolicyEmailAccountAdmins
    :param storage_endpoint: Blob storage endpoint (e.g.
     https://MyAccount.blob.core.windows.net) that holds all Threat Detection
     audit logs. Required if state is Enabled.
    :type storage_endpoint: str
    :param storage_account_access_key: Identifier key of the Threat Detection
     audit storage account. Required if state is Enabled.
    :type storage_account_access_key: str
    :param retention_days: Number of days to keep in the Threat Detection
     audit logs.
    :type retention_days: int
    :param use_server_default: Whether to use the default server policy
     ('Enabled', 'Disabled').
    :type use_server_default: str or
     ~azure.mgmt.sql.models.SecurityAlertPolicyUseServerDefault
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'kind': {'readonly': True},
        'state': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'SecurityAlertPolicyState'},
        'disabled_alerts': {'key': 'properties.disabledAlerts', 'type': 'str'},
        'email_addresses': {'key': 'properties.emailAddresses', 'type': 'str'},
        'email_account_admins': {'key': 'properties.emailAccountAdmins', 'type': 'SecurityAlertPolicyEmailAccountAdmins'},
        'storage_endpoint': {'key': 'properties.storageEndpoint', 'type': 'str'},
        'storage_account_access_key': {'key': 'properties.storageAccountAccessKey', 'type': 'str'},
        'retention_days': {'key': 'properties.retentionDays', 'type': 'int'},
        'use_server_default': {'key': 'properties.useServerDefault', 'type': 'SecurityAlertPolicyUseServerDefault'},
    }

    def __init__(self, *, state, location: str=None, disabled_alerts: str=None, email_addresses: str=None, email_account_admins=None, storage_endpoint: str=None, storage_account_access_key: str=None, retention_days: int=None, use_server_default=None, **kwargs) -> None:
        super(DatabaseSecurityAlertPolicy, self).__init__(**kwargs)
        self.location = location
        # Server-populated metadata.
        self.kind = None
        # Caller-supplied policy settings.
        self.state = state
        self.disabled_alerts = disabled_alerts
        self.email_addresses = email_addresses
        self.email_account_admins = email_account_admins
        self.storage_endpoint = storage_endpoint
        self.storage_account_access_key = storage_account_access_key
        self.retention_days = retention_days
        self.use_server_default = use_server_default
class DatabaseUpdate(Model):
    """A database resource used for update operations.

    Read-only variables are populated by the server and ignored when sending
    a request.

    :param sku: The name and tier of the SKU.
    :type sku: ~azure.mgmt.sql.models.Sku
    :param create_mode: Mode of database creation. 'Default': regular
     creation. 'Copy': copy of an existing database (sourceDatabaseId =
     source database resource ID). 'Secondary': secondary replica of an
     existing database (sourceDatabaseId = primary database resource ID).
     'PointInTimeRestore': restore a point-in-time backup (sourceDatabaseId
     and restorePointInTime required). 'Recovery': restore a geo-replicated
     backup (sourceDatabaseId = recoverable database resource ID).
     'Restore': restore a backup of a deleted database (sourceDatabaseId
     required; with the original resource ID also set
     sourceDatabaseDeletionDate, otherwise pass the restorable dropped
     database resource ID; restorePointInTime may restore an earlier point).
     'RestoreLongTermRetentionBackup': restore from a long term retention
     vault (recoveryServicesRecoveryPointResourceId = recovery point
     resource ID). Copy, Secondary and RestoreLongTermRetentionBackup are
     not supported for DataWarehouse edition. Possible values include:
     'Default', 'Copy', 'Secondary', 'PointInTimeRestore', 'Restore',
     'Recovery', 'RestoreExternalBackup', 'RestoreExternalBackupSecondary',
     'RestoreLongTermRetentionBackup', 'OnlineSecondary'
    :type create_mode: str or ~azure.mgmt.sql.models.CreateMode
    :param collation: The collation of the database.
    :type collation: str
    :param max_size_bytes: Max size of the database expressed in bytes.
    :type max_size_bytes: long
    :param sample_name: Sample schema to apply when creating this database.
     Possible values include: 'AdventureWorksLT', 'WideWorldImportersStd',
     'WideWorldImportersFull'
    :type sample_name: str or ~azure.mgmt.sql.models.SampleName
    :param elastic_pool_id: Resource identifier of the elastic pool
     containing this database.
    :type elastic_pool_id: str
    :param source_database_id: Resource identifier of the source database
     associated with the create operation of this database.
    :type source_database_id: str
    :ivar status: Status of the database. Possible values include: 'Online',
     'Restoring', 'RecoveryPending', 'Recovering', 'Suspect', 'Offline',
     'Standby', 'Shutdown', 'EmergencyMode', 'AutoClosed', 'Copying',
     'Creating', 'Inaccessible', 'OfflineSecondary', 'Pausing', 'Paused',
     'Resuming', 'Scaling', 'OfflineChangingDwPerformanceTiers',
     'OnlineChangingDwPerformanceTiers', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.DatabaseStatus
    :ivar database_id: The ID of the database.
    :vartype database_id: str
    :ivar creation_date: Creation date of the database (ISO8601 format).
    :vartype creation_date: datetime
    :ivar current_service_objective_name: Current service level objective
     name of the database.
    :vartype current_service_objective_name: str
    :ivar requested_service_objective_name: Requested service level objective
     name of the database.
    :vartype requested_service_objective_name: str
    :ivar default_secondary_location: Default secondary region for this
     database.
    :vartype default_secondary_location: str
    :ivar failover_group_id: Failover Group resource identifier that this
     database belongs to.
    :vartype failover_group_id: str
    :param restore_point_in_time: Point in time (ISO8601 format) of the
     source database to restore when creating the new database.
    :type restore_point_in_time: datetime
    :param source_database_deletion_date: Time that the database was deleted.
    :type source_database_deletion_date: datetime
    :param recovery_services_recovery_point_id: Resource identifier of the
     recovery point associated with the create operation of this database.
    :type recovery_services_recovery_point_id: str
    :param long_term_retention_backup_resource_id: Resource identifier of the
     long term retention backup associated with the create operation of this
     database.
    :type long_term_retention_backup_resource_id: str
    :param recoverable_database_id: Resource identifier of the recoverable
     database associated with the create operation of this database.
    :type recoverable_database_id: str
    :param restorable_dropped_database_id: Resource identifier of the
     restorable dropped database associated with the create operation of
     this database.
    :type restorable_dropped_database_id: str
    :param catalog_collation: Collation of the metadata catalog. Possible
     values include: 'DATABASE_DEFAULT', 'SQL_Latin1_General_CP1_CI_AS'
    :type catalog_collation: str or
     ~azure.mgmt.sql.models.CatalogCollationType
    :param zone_redundant: Whether this database is zone redundant, meaning
     its replicas are spread across multiple availability zones.
    :type zone_redundant: bool
    :param license_type: License type to apply for this database. Possible
     values include: 'LicenseIncluded', 'BasePrice'
    :type license_type: str or ~azure.mgmt.sql.models.DatabaseLicenseType
    :ivar max_log_size_bytes: The max log size for this database.
    :vartype max_log_size_bytes: long
    :ivar earliest_restore_date: Earliest start date and time that restore is
     available for this database (ISO8601 format).
    :vartype earliest_restore_date: datetime
    :param read_scale: If enabled, connections with application intent set to
     readonly in their connection string may be routed to a readonly
     secondary replica. Settable only for Premium and Business Critical
     databases. Possible values include: 'Enabled', 'Disabled'
    :type read_scale: str or ~azure.mgmt.sql.models.DatabaseReadScale
    :param read_replica_count: Number of readonly secondary replicas to which
     readonly application intent connections may be routed. Settable only for
     Hyperscale edition databases.
    :type read_replica_count: int
    :ivar current_sku: The name and tier of the SKU.
    :vartype current_sku: ~azure.mgmt.sql.models.Sku
    :param auto_pause_delay: Time in minutes after which the database is
     automatically paused. A value of -1 disables automatic pause.
    :type auto_pause_delay: int
    :param min_capacity: Minimal capacity that the database always has
     allocated, if not paused.
    :type min_capacity: float
    :ivar paused_date: Date the database was paused by user configuration or
     action (ISO8601 format). Null if the database is ready.
    :vartype paused_date: datetime
    :ivar resumed_date: Date the database was resumed by user action or
     database login (ISO8601 format). Null if the database is paused.
    :vartype resumed_date: datetime
    :param tags: Resource tags.
    :type tags: dict[str, str]
    """

    # Fields the server computes; clients cannot set them.
    _validation = {
        'status': {'readonly': True},
        'database_id': {'readonly': True},
        'creation_date': {'readonly': True},
        'current_service_objective_name': {'readonly': True},
        'requested_service_objective_name': {'readonly': True},
        'default_secondary_location': {'readonly': True},
        'failover_group_id': {'readonly': True},
        'max_log_size_bytes': {'readonly': True},
        'earliest_restore_date': {'readonly': True},
        'current_sku': {'readonly': True},
        'paused_date': {'readonly': True},
        'resumed_date': {'readonly': True},
    }

    _attribute_map = {
        'sku': {'key': 'sku', 'type': 'Sku'},
        'create_mode': {'key': 'properties.createMode', 'type': 'str'},
        'collation': {'key': 'properties.collation', 'type': 'str'},
        'max_size_bytes': {'key': 'properties.maxSizeBytes', 'type': 'long'},
        'sample_name': {'key': 'properties.sampleName', 'type': 'str'},
        'elastic_pool_id': {'key': 'properties.elasticPoolId', 'type': 'str'},
        'source_database_id': {'key': 'properties.sourceDatabaseId', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'str'},
        'database_id': {'key': 'properties.databaseId', 'type': 'str'},
        'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'},
        'current_service_objective_name': {'key': 'properties.currentServiceObjectiveName', 'type': 'str'},
        'requested_service_objective_name': {'key': 'properties.requestedServiceObjectiveName', 'type': 'str'},
        'default_secondary_location': {'key': 'properties.defaultSecondaryLocation', 'type': 'str'},
        'failover_group_id': {'key': 'properties.failoverGroupId', 'type': 'str'},
        'restore_point_in_time': {'key': 'properties.restorePointInTime', 'type': 'iso-8601'},
        'source_database_deletion_date': {'key': 'properties.sourceDatabaseDeletionDate', 'type': 'iso-8601'},
        'recovery_services_recovery_point_id': {'key': 'properties.recoveryServicesRecoveryPointId', 'type': 'str'},
        'long_term_retention_backup_resource_id': {'key': 'properties.longTermRetentionBackupResourceId', 'type': 'str'},
        'recoverable_database_id': {'key': 'properties.recoverableDatabaseId', 'type': 'str'},
        'restorable_dropped_database_id': {'key': 'properties.restorableDroppedDatabaseId', 'type': 'str'},
        'catalog_collation': {'key': 'properties.catalogCollation', 'type': 'str'},
        'zone_redundant': {'key': 'properties.zoneRedundant', 'type': 'bool'},
        'license_type': {'key': 'properties.licenseType', 'type': 'str'},
        'max_log_size_bytes': {'key': 'properties.maxLogSizeBytes', 'type': 'long'},
        'earliest_restore_date': {'key': 'properties.earliestRestoreDate', 'type': 'iso-8601'},
        'read_scale': {'key': 'properties.readScale', 'type': 'str'},
        'read_replica_count': {'key': 'properties.readReplicaCount', 'type': 'int'},
        'current_sku': {'key': 'properties.currentSku', 'type': 'Sku'},
        'auto_pause_delay': {'key': 'properties.autoPauseDelay', 'type': 'int'},
        'min_capacity': {'key': 'properties.minCapacity', 'type': 'float'},
        'paused_date': {'key': 'properties.pausedDate', 'type': 'iso-8601'},
        'resumed_date': {'key': 'properties.resumedDate', 'type': 'iso-8601'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, *, sku=None, create_mode=None, collation: str=None, max_size_bytes: int=None, sample_name=None, elastic_pool_id: str=None, source_database_id: str=None, restore_point_in_time=None, source_database_deletion_date=None, recovery_services_recovery_point_id: str=None, long_term_retention_backup_resource_id: str=None, recoverable_database_id: str=None, restorable_dropped_database_id: str=None, catalog_collation=None, zone_redundant: bool=None, license_type=None, read_scale=None, read_replica_count: int=None, auto_pause_delay: int=None, min_capacity: float=None, tags=None, **kwargs) -> None:
        super(DatabaseUpdate, self).__init__(**kwargs)
        # Caller-supplied settings.
        self.sku = sku
        self.create_mode = create_mode
        self.collation = collation
        self.max_size_bytes = max_size_bytes
        self.sample_name = sample_name
        self.elastic_pool_id = elastic_pool_id
        self.source_database_id = source_database_id
        # Server-populated, filled during response deserialization.
        self.status = None
        self.database_id = None
        self.creation_date = None
        self.current_service_objective_name = None
        self.requested_service_objective_name = None
        self.default_secondary_location = None
        self.failover_group_id = None
        # More caller-supplied settings.
        self.restore_point_in_time = restore_point_in_time
        self.source_database_deletion_date = source_database_deletion_date
        self.recovery_services_recovery_point_id = recovery_services_recovery_point_id
        self.long_term_retention_backup_resource_id = long_term_retention_backup_resource_id
        self.recoverable_database_id = recoverable_database_id
        self.restorable_dropped_database_id = restorable_dropped_database_id
        self.catalog_collation = catalog_collation
        self.zone_redundant = zone_redundant
        self.license_type = license_type
        self.max_log_size_bytes = None
        self.earliest_restore_date = None
        self.read_scale = read_scale
        self.read_replica_count = read_replica_count
        self.current_sku = None
        self.auto_pause_delay = auto_pause_delay
        self.min_capacity = min_capacity
        self.paused_date = None
        self.resumed_date = None
        self.tags = tags
class DatabaseUsage(Model):
    """The database usages.

    Every property is populated by the server and is ignored when sending a
    request.

    :ivar name: Name of the usage metric.
    :vartype name: str
    :ivar resource_name: Name of the resource.
    :vartype resource_name: str
    :ivar display_name: Usage metric display name.
    :vartype display_name: str
    :ivar current_value: Current value of the usage metric.
    :vartype current_value: float
    :ivar limit: Current limit of the usage metric.
    :vartype limit: float
    :ivar unit: Units of the usage metric.
    :vartype unit: str
    :ivar next_reset_time: Next reset time for the usage metric (ISO8601
     format).
    :vartype next_reset_time: datetime
    """

    # Entirely server-populated model.
    _validation = {
        'name': {'readonly': True},
        'resource_name': {'readonly': True},
        'display_name': {'readonly': True},
        'current_value': {'readonly': True},
        'limit': {'readonly': True},
        'unit': {'readonly': True},
        'next_reset_time': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'resource_name': {'key': 'resourceName', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'current_value': {'key': 'currentValue', 'type': 'float'},
        'limit': {'key': 'limit', 'type': 'float'},
        'unit': {'key': 'unit', 'type': 'str'},
        'next_reset_time': {'key': 'nextResetTime', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs) -> None:
        super(DatabaseUsage, self).__init__(**kwargs)
        # All values arrive via deserialization of a server response.
        self.name = None
        self.resource_name = None
        self.display_name = None
        self.current_value = None
        self.limit = None
        self.unit = None
        self.next_reset_time = None
class DatabaseVulnerabilityAssessment(ProxyResource):
    """A database vulnerability assessment.

    Read-only variables are populated by the server and ignored when sending
    a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param storage_container_path: Blob storage container path that holds the
     scan results (e.g. https://myStorage.blob.core.windows.net/VaScans/).
     Required if the server level vulnerability assessment policy doesn't set
     one.
    :type storage_container_path: str
    :param storage_container_sas_key: Shared access signature (SAS key) with
     write access to the blob container in 'storageContainerPath'. Required
     if 'storageAccountAccessKey' isn't specified.
    :type storage_container_sas_key: str
    :param storage_account_access_key: Identifier key of the storage account
     for scan results. Required if 'StorageContainerSasKey' isn't specified.
    :type storage_account_access_key: str
    :param recurring_scans: The recurring scans settings.
    :type recurring_scans:
     ~azure.mgmt.sql.models.VulnerabilityAssessmentRecurringScansProperties
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'storage_container_path': {'key': 'properties.storageContainerPath', 'type': 'str'},
        'storage_container_sas_key': {'key': 'properties.storageContainerSasKey', 'type': 'str'},
        'storage_account_access_key': {'key': 'properties.storageAccountAccessKey', 'type': 'str'},
        'recurring_scans': {'key': 'properties.recurringScans', 'type': 'VulnerabilityAssessmentRecurringScansProperties'},
    }

    def __init__(self, *, storage_container_path: str=None, storage_container_sas_key: str=None, storage_account_access_key: str=None, recurring_scans=None, **kwargs) -> None:
        super(DatabaseVulnerabilityAssessment, self).__init__(**kwargs)
        self.storage_container_path = storage_container_path
        self.storage_container_sas_key = storage_container_sas_key
        self.storage_account_access_key = storage_account_access_key
        self.recurring_scans = recurring_scans
class DatabaseVulnerabilityAssessmentRuleBaseline(ProxyResource):
    """A database vulnerability assessment rule baseline.

    Read-only variables are populated by the server and ignored when sending
    a request. All required parameters must be populated in order to send to
    Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param baseline_results: Required. The rule baseline result.
    :type baseline_results:
     list[~azure.mgmt.sql.models.DatabaseVulnerabilityAssessmentRuleBaselineItem]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'baseline_results': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'baseline_results': {'key': 'properties.baselineResults', 'type': '[DatabaseVulnerabilityAssessmentRuleBaselineItem]'},
    }

    def __init__(self, *, baseline_results, **kwargs) -> None:
        super(DatabaseVulnerabilityAssessmentRuleBaseline, self).__init__(**kwargs)
        self.baseline_results = baseline_results
class DatabaseVulnerabilityAssessmentRuleBaselineItem(Model):
    """One result row of an Azure SQL Database Vulnerability Assessment rule
    baseline.

    All required parameters must be populated in order to send to Azure.

    :param result: Required. The rule baseline result.
    :type result: list[str]
    """

    _validation = {
        'result': {'required': True},
    }

    _attribute_map = {
        'result': {'key': 'result', 'type': '[str]'},
    }

    def __init__(self, *, result, **kwargs) -> None:
        super(DatabaseVulnerabilityAssessmentRuleBaselineItem, self).__init__(**kwargs)
        self.result = result
class DatabaseVulnerabilityAssessmentScansExport(ProxyResource):
    """A database Vulnerability Assessment scan export resource.

    Every property is populated by the server and is ignored when sending a
    request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar exported_report_location: Location of the exported report (e.g.
     https://myStorage.blob.core.windows.net/VaScans/scans/serverName/databaseName/scan_scanId.xlsx).
    :vartype exported_report_location: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'exported_report_location': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'exported_report_location': {'key': 'properties.exportedReportLocation', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super(DatabaseVulnerabilityAssessmentScansExport, self).__init__(**kwargs)
        # Populated by the server once the export completes.
        self.exported_report_location = None
class DataMaskingPolicy(ProxyResource):
    """A database data masking policy.

    Read-only variables are populated by the server and ignored when sending
    a request. All required parameters must be populated in order to send to
    Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param data_masking_state: Required. State of the data masking policy.
     Possible values include: 'Disabled', 'Enabled'
    :type data_masking_state: str or ~azure.mgmt.sql.models.DataMaskingState
    :param exempt_principals: Semicolon-separated list of database users for
     which the data masking policy does not apply; those users receive data
     results without masking for all database queries.
    :type exempt_principals: str
    :ivar application_principals: List of application principals. Legacy
     parameter, no longer used.
    :vartype application_principals: str
    :ivar masking_level: The masking level. Legacy parameter, no longer used.
    :vartype masking_level: str
    :ivar location: Location of the data masking policy.
    :vartype location: str
    :ivar kind: Kind of data masking policy. Metadata used by the Azure
     portal.
    :vartype kind: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'data_masking_state': {'required': True},
        'application_principals': {'readonly': True},
        'masking_level': {'readonly': True},
        'location': {'readonly': True},
        'kind': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'data_masking_state': {'key': 'properties.dataMaskingState', 'type': 'DataMaskingState'},
        'exempt_principals': {'key': 'properties.exemptPrincipals', 'type': 'str'},
        'application_principals': {'key': 'properties.applicationPrincipals', 'type': 'str'},
        'masking_level': {'key': 'properties.maskingLevel', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
    }

    def __init__(self, *, data_masking_state, exempt_principals: str=None, **kwargs) -> None:
        super(DataMaskingPolicy, self).__init__(**kwargs)
        # Caller-supplied settings.
        self.data_masking_state = data_masking_state
        self.exempt_principals = exempt_principals
        # Server-populated / legacy fields.
        self.application_principals = None
        self.masking_level = None
        self.location = None
        self.kind = None
class DataMaskingRule(ProxyResource):
    """A database data masking rule.

    Read-only variables are populated by the server and ignored when sending
    a request. All required parameters must be populated in order to send to
    Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar data_masking_rule_id: The rule Id.
    :vartype data_masking_rule_id: str
    :param alias_name: The alias name. Legacy parameter, no longer used.
    :type alias_name: str
    :param rule_state: Rule state; used to delete a rule. To delete an
     existing rule, specify schemaName, tableName, columnName,
     maskingFunction, and set ruleState to disabled. If the rule doesn't
     already exist it is created with ruleState enabled regardless of the
     provided value. Possible values include: 'Disabled', 'Enabled'
    :type rule_state: str or ~azure.mgmt.sql.models.DataMaskingRuleState
    :param schema_name: Required. Schema name on which the data masking rule
     is applied.
    :type schema_name: str
    :param table_name: Required. Table name on which the data masking rule is
     applied.
    :type table_name: str
    :param column_name: Required. Column name on which the data masking rule
     is applied.
    :type column_name: str
    :param masking_function: Required. Masking function used for the data
     masking rule. Possible values include: 'Default', 'CCN', 'Email',
     'Number', 'SSN', 'Text'
    :type masking_function: str or ~azure.mgmt.sql.models.DataMaskingFunction
    :param number_from: numberFrom property of the masking rule. Required if
     maskingFunction is Number; otherwise ignored.
    :type number_from: str
    :param number_to: numberTo property of the data masking rule. Required if
     maskingFunction is Number; otherwise ignored.
    :type number_to: str
    :param prefix_size: If maskingFunction is Text, the number of characters
     to show unmasked at the beginning of the string; otherwise ignored.
    :type prefix_size: str
    :param suffix_size: If maskingFunction is Text, the number of characters
     to show unmasked at the end of the string; otherwise ignored.
    :type suffix_size: str
    :param replacement_string: If maskingFunction is Text, the character used
     for masking the unexposed part of the string; otherwise ignored.
    :type replacement_string: str
    :ivar location: Location of the data masking rule.
    :vartype location: str
    :ivar kind: Kind of Data Masking Rule. Metadata used by the Azure portal.
    :vartype kind: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'data_masking_rule_id': {'readonly': True},
        'schema_name': {'required': True},
        'table_name': {'required': True},
        'column_name': {'required': True},
        'masking_function': {'required': True},
        'location': {'readonly': True},
        'kind': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'data_masking_rule_id': {'key': 'properties.id', 'type': 'str'},
        'alias_name': {'key': 'properties.aliasName', 'type': 'str'},
        'rule_state': {'key': 'properties.ruleState', 'type': 'DataMaskingRuleState'},
        'schema_name': {'key': 'properties.schemaName', 'type': 'str'},
        'table_name': {'key': 'properties.tableName', 'type': 'str'},
        'column_name': {'key': 'properties.columnName', 'type': 'str'},
        'masking_function': {'key': 'properties.maskingFunction', 'type': 'DataMaskingFunction'},
        'number_from': {'key': 'properties.numberFrom', 'type': 'str'},
        'number_to': {'key': 'properties.numberTo', 'type': 'str'},
        'prefix_size': {'key': 'properties.prefixSize', 'type': 'str'},
        'suffix_size': {'key': 'properties.suffixSize', 'type': 'str'},
        'replacement_string': {'key': 'properties.replacementString', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
    }

    def __init__(self, *, schema_name: str, table_name: str, column_name: str, masking_function, alias_name: str=None, rule_state=None, number_from: str=None, number_to: str=None, prefix_size: str=None, suffix_size: str=None, replacement_string: str=None, **kwargs) -> None:
        super(DataMaskingRule, self).__init__(**kwargs)
        # Server-populated identifier.
        self.data_masking_rule_id = None
        # Caller-supplied rule definition.
        self.alias_name = alias_name
        self.rule_state = rule_state
        self.schema_name = schema_name
        self.table_name = table_name
        self.column_name = column_name
        self.masking_function = masking_function
        self.number_from = number_from
        self.number_to = number_to
        self.prefix_size = prefix_size
        self.suffix_size = suffix_size
        self.replacement_string = replacement_string
        # Server-populated metadata.
        self.location = None
        self.kind = None
class EditionCapability(Model):
    """Describes a single database edition capability.

    All attributes except ``reason`` are populated by the server and will be
    ignored when sent in a request.

    :ivar name: The database edition name.
    :vartype name: str
    :ivar supported_service_level_objectives: The list of supported service
     objectives for the edition.
    :vartype supported_service_level_objectives:
     list[~azure.mgmt.sql.models.ServiceObjectiveCapability]
    :ivar zone_redundant: Whether or not zone redundancy is supported for the
     edition.
    :vartype zone_redundant: bool
    :ivar status: The status of the capability. Possible values include:
     'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: The reason for the capability not being available.
    :type reason: str
    """

    _validation = {
        "name": {"readonly": True},
        "supported_service_level_objectives": {"readonly": True},
        "zone_redundant": {"readonly": True},
        "status": {"readonly": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "supported_service_level_objectives": {"key": "supportedServiceLevelObjectives", "type": "[ServiceObjectiveCapability]"},
        "zone_redundant": {"key": "zoneRedundant", "type": "bool"},
        "status": {"key": "status", "type": "CapabilityStatus"},
        "reason": {"key": "reason", "type": "str"},
    }

    def __init__(self, *, reason: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # Read-only attributes start out unset; the server fills them in.
        self.name = None
        self.supported_service_level_objectives = None
        self.zone_redundant = None
        self.status = None
        self.reason = reason
class ElasticPool(TrackedResource):
    """An Azure SQL elastic pool.

    Read-only attributes are populated by the server and will be ignored when
    sending a request. All required parameters must be populated in order to
    send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: Required. Resource location.
    :type location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param sku: The elastic pool SKU.
     The list of SKUs may vary by region and support offer. To determine the
     SKUs (including the SKU name, tier/edition, family, and capacity) that
     are available to your subscription in an Azure region, use the
     `Capabilities_ListByLocation` REST API or the following command:

     ```azurecli
     az sql elastic-pool list-editions -l <location> -o table
     ```
    :type sku: ~azure.mgmt.sql.models.Sku
    :ivar kind: Kind of elastic pool. This is metadata used for the Azure
     portal experience.
    :vartype kind: str
    :ivar state: The state of the elastic pool. Possible values include:
     'Creating', 'Ready', 'Disabled'
    :vartype state: str or ~azure.mgmt.sql.models.ElasticPoolState
    :ivar creation_date: The creation date of the elastic pool (ISO8601
     format).
    :vartype creation_date: datetime
    :param max_size_bytes: The storage limit for the database elastic pool in
     bytes.
    :type max_size_bytes: long
    :param per_database_settings: The per database settings for the elastic
     pool.
    :type per_database_settings:
     ~azure.mgmt.sql.models.ElasticPoolPerDatabaseSettings
    :param zone_redundant: Whether or not this elastic pool is zone
     redundant, which means the replicas of this elastic pool will be spread
     across multiple availability zones.
    :type zone_redundant: bool
    :param license_type: The license type to apply for this elastic pool.
     Possible values include: 'LicenseIncluded', 'BasePrice'
    :type license_type: str or ~azure.mgmt.sql.models.ElasticPoolLicenseType
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "location": {"required": True},
        "kind": {"readonly": True},
        "state": {"readonly": True},
        "creation_date": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "sku": {"key": "sku", "type": "Sku"},
        "kind": {"key": "kind", "type": "str"},
        "state": {"key": "properties.state", "type": "str"},
        "creation_date": {"key": "properties.creationDate", "type": "iso-8601"},
        "max_size_bytes": {"key": "properties.maxSizeBytes", "type": "long"},
        "per_database_settings": {"key": "properties.perDatabaseSettings", "type": "ElasticPoolPerDatabaseSettings"},
        "zone_redundant": {"key": "properties.zoneRedundant", "type": "bool"},
        "license_type": {"key": "properties.licenseType", "type": "str"},
    }

    def __init__(self, *, location: str, tags=None, sku=None, max_size_bytes: int = None, per_database_settings=None, zone_redundant: bool = None, license_type=None, **kwargs) -> None:
        # location/tags are handled by the TrackedResource base class.
        super().__init__(location=location, tags=tags, **kwargs)
        self.sku = sku
        # Server-populated metadata.
        self.kind = None
        self.state = None
        self.creation_date = None
        # Caller-configurable pool settings.
        self.max_size_bytes = max_size_bytes
        self.per_database_settings = per_database_settings
        self.zone_redundant = zone_redundant
        self.license_type = license_type
class ElasticPoolActivity(ProxyResource):
    """Represents the activity on an elastic pool.

    Every attribute except ``location`` is populated by the server and will
    be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: The geo-location where the resource lives
    :type location: str
    :ivar end_time: The time the operation finished (ISO8601 format).
    :vartype end_time: datetime
    :ivar error_code: The error code if available.
    :vartype error_code: int
    :ivar error_message: The error message if available.
    :vartype error_message: str
    :ivar error_severity: The error severity if available.
    :vartype error_severity: int
    :ivar operation: The operation name.
    :vartype operation: str
    :ivar operation_id: The unique operation ID.
    :vartype operation_id: str
    :ivar percent_complete: The percentage complete if available.
    :vartype percent_complete: int
    :ivar requested_database_dtu_max: The requested max DTU per database if
     available.
    :vartype requested_database_dtu_max: int
    :ivar requested_database_dtu_min: The requested min DTU per database if
     available.
    :vartype requested_database_dtu_min: int
    :ivar requested_dtu: The requested DTU for the pool if available.
    :vartype requested_dtu: int
    :ivar requested_elastic_pool_name: The requested name for the elastic
     pool if available.
    :vartype requested_elastic_pool_name: str
    :ivar requested_storage_limit_in_gb: The requested storage limit for the
     pool in GB if available.
    :vartype requested_storage_limit_in_gb: long
    :ivar elastic_pool_name: The name of the elastic pool.
    :vartype elastic_pool_name: str
    :ivar server_name: The name of the server the elastic pool is in.
    :vartype server_name: str
    :ivar start_time: The time the operation started (ISO8601 format).
    :vartype start_time: datetime
    :ivar state: The current state of the operation.
    :vartype state: str
    :ivar requested_storage_limit_in_mb: The requested storage limit in MB.
    :vartype requested_storage_limit_in_mb: int
    :ivar requested_database_dtu_guarantee: The requested per database DTU
     guarantee.
    :vartype requested_database_dtu_guarantee: int
    :ivar requested_database_dtu_cap: The requested per database DTU cap.
    :vartype requested_database_dtu_cap: int
    :ivar requested_dtu_guarantee: The requested DTU guarantee.
    :vartype requested_dtu_guarantee: int
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "end_time": {"readonly": True},
        "error_code": {"readonly": True},
        "error_message": {"readonly": True},
        "error_severity": {"readonly": True},
        "operation": {"readonly": True},
        "operation_id": {"readonly": True},
        "percent_complete": {"readonly": True},
        "requested_database_dtu_max": {"readonly": True},
        "requested_database_dtu_min": {"readonly": True},
        "requested_dtu": {"readonly": True},
        "requested_elastic_pool_name": {"readonly": True},
        "requested_storage_limit_in_gb": {"readonly": True},
        "elastic_pool_name": {"readonly": True},
        "server_name": {"readonly": True},
        "start_time": {"readonly": True},
        "state": {"readonly": True},
        "requested_storage_limit_in_mb": {"readonly": True},
        "requested_database_dtu_guarantee": {"readonly": True},
        "requested_database_dtu_cap": {"readonly": True},
        "requested_dtu_guarantee": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "end_time": {"key": "properties.endTime", "type": "iso-8601"},
        "error_code": {"key": "properties.errorCode", "type": "int"},
        "error_message": {"key": "properties.errorMessage", "type": "str"},
        "error_severity": {"key": "properties.errorSeverity", "type": "int"},
        "operation": {"key": "properties.operation", "type": "str"},
        "operation_id": {"key": "properties.operationId", "type": "str"},
        "percent_complete": {"key": "properties.percentComplete", "type": "int"},
        "requested_database_dtu_max": {"key": "properties.requestedDatabaseDtuMax", "type": "int"},
        "requested_database_dtu_min": {"key": "properties.requestedDatabaseDtuMin", "type": "int"},
        "requested_dtu": {"key": "properties.requestedDtu", "type": "int"},
        "requested_elastic_pool_name": {"key": "properties.requestedElasticPoolName", "type": "str"},
        "requested_storage_limit_in_gb": {"key": "properties.requestedStorageLimitInGB", "type": "long"},
        "elastic_pool_name": {"key": "properties.elasticPoolName", "type": "str"},
        "server_name": {"key": "properties.serverName", "type": "str"},
        "start_time": {"key": "properties.startTime", "type": "iso-8601"},
        "state": {"key": "properties.state", "type": "str"},
        "requested_storage_limit_in_mb": {"key": "properties.requestedStorageLimitInMB", "type": "int"},
        "requested_database_dtu_guarantee": {"key": "properties.requestedDatabaseDtuGuarantee", "type": "int"},
        "requested_database_dtu_cap": {"key": "properties.requestedDatabaseDtuCap", "type": "int"},
        "requested_dtu_guarantee": {"key": "properties.requestedDtuGuarantee", "type": "int"},
    }

    def __init__(self, *, location: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.location = location
        # Every other attribute is server-populated; initialize all of them
        # to None in one pass.
        for _readonly_attr in (
            "end_time", "error_code", "error_message", "error_severity",
            "operation", "operation_id", "percent_complete",
            "requested_database_dtu_max", "requested_database_dtu_min",
            "requested_dtu", "requested_elastic_pool_name",
            "requested_storage_limit_in_gb", "elastic_pool_name",
            "server_name", "start_time", "state",
            "requested_storage_limit_in_mb",
            "requested_database_dtu_guarantee",
            "requested_database_dtu_cap", "requested_dtu_guarantee",
        ):
            setattr(self, _readonly_attr, None)
class ElasticPoolDatabaseActivity(ProxyResource):
    """Represents the activity on an elastic pool.

    Every attribute except ``location`` is populated by the server and will
    be ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: The geo-location where the resource lives
    :type location: str
    :ivar database_name: The database name.
    :vartype database_name: str
    :ivar end_time: The time the operation finished (ISO8601 format).
    :vartype end_time: datetime
    :ivar error_code: The error code if available.
    :vartype error_code: int
    :ivar error_message: The error message if available.
    :vartype error_message: str
    :ivar error_severity: The error severity if available.
    :vartype error_severity: int
    :ivar operation: The operation name.
    :vartype operation: str
    :ivar operation_id: The unique operation ID.
    :vartype operation_id: str
    :ivar percent_complete: The percentage complete if available.
    :vartype percent_complete: int
    :ivar requested_elastic_pool_name: The name for the elastic pool the
     database is moving into if available.
    :vartype requested_elastic_pool_name: str
    :ivar current_elastic_pool_name: The name of the current elastic pool
     the database is in if available.
    :vartype current_elastic_pool_name: str
    :ivar current_service_objective: The name of the current service
     objective if available.
    :vartype current_service_objective: str
    :ivar requested_service_objective: The name of the requested service
     objective if available.
    :vartype requested_service_objective: str
    :ivar server_name: The name of the server the elastic pool is in.
    :vartype server_name: str
    :ivar start_time: The time the operation started (ISO8601 format).
    :vartype start_time: datetime
    :ivar state: The current state of the operation.
    :vartype state: str
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "database_name": {"readonly": True},
        "end_time": {"readonly": True},
        "error_code": {"readonly": True},
        "error_message": {"readonly": True},
        "error_severity": {"readonly": True},
        "operation": {"readonly": True},
        "operation_id": {"readonly": True},
        "percent_complete": {"readonly": True},
        "requested_elastic_pool_name": {"readonly": True},
        "current_elastic_pool_name": {"readonly": True},
        "current_service_objective": {"readonly": True},
        "requested_service_objective": {"readonly": True},
        "server_name": {"readonly": True},
        "start_time": {"readonly": True},
        "state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "database_name": {"key": "properties.databaseName", "type": "str"},
        "end_time": {"key": "properties.endTime", "type": "iso-8601"},
        "error_code": {"key": "properties.errorCode", "type": "int"},
        "error_message": {"key": "properties.errorMessage", "type": "str"},
        "error_severity": {"key": "properties.errorSeverity", "type": "int"},
        "operation": {"key": "properties.operation", "type": "str"},
        "operation_id": {"key": "properties.operationId", "type": "str"},
        "percent_complete": {"key": "properties.percentComplete", "type": "int"},
        "requested_elastic_pool_name": {"key": "properties.requestedElasticPoolName", "type": "str"},
        "current_elastic_pool_name": {"key": "properties.currentElasticPoolName", "type": "str"},
        "current_service_objective": {"key": "properties.currentServiceObjective", "type": "str"},
        "requested_service_objective": {"key": "properties.requestedServiceObjective", "type": "str"},
        "server_name": {"key": "properties.serverName", "type": "str"},
        "start_time": {"key": "properties.startTime", "type": "iso-8601"},
        "state": {"key": "properties.state", "type": "str"},
    }

    def __init__(self, *, location: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.location = location
        # All remaining attributes are server-populated; reset them in bulk.
        for _readonly_attr in (
            "database_name", "end_time", "error_code", "error_message",
            "error_severity", "operation", "operation_id",
            "percent_complete", "requested_elastic_pool_name",
            "current_elastic_pool_name", "current_service_objective",
            "requested_service_objective", "server_name", "start_time",
            "state",
        ):
            setattr(self, _readonly_attr, None)
class ElasticPoolEditionCapability(Model):
    """Describes a single elastic pool edition capability.

    All attributes except ``reason`` are populated by the server and will be
    ignored when sent in a request.

    :ivar name: The elastic pool edition name.
    :vartype name: str
    :ivar supported_elastic_pool_performance_levels: The list of supported
     elastic pool DTU levels for the edition.
    :vartype supported_elastic_pool_performance_levels:
     list[~azure.mgmt.sql.models.ElasticPoolPerformanceLevelCapability]
    :ivar zone_redundant: Whether or not zone redundancy is supported for the
     edition.
    :vartype zone_redundant: bool
    :ivar status: The status of the capability. Possible values include:
     'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: The reason for the capability not being available.
    :type reason: str
    """

    _validation = {
        "name": {"readonly": True},
        "supported_elastic_pool_performance_levels": {"readonly": True},
        "zone_redundant": {"readonly": True},
        "status": {"readonly": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "supported_elastic_pool_performance_levels": {"key": "supportedElasticPoolPerformanceLevels", "type": "[ElasticPoolPerformanceLevelCapability]"},
        "zone_redundant": {"key": "zoneRedundant", "type": "bool"},
        "status": {"key": "status", "type": "CapabilityStatus"},
        "reason": {"key": "reason", "type": "str"},
    }

    def __init__(self, *, reason: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # Read-only attributes start out unset; the server fills them in.
        self.name = None
        self.supported_elastic_pool_performance_levels = None
        self.zone_redundant = None
        self.status = None
        self.reason = reason
class ElasticPoolOperation(ProxyResource):
    """A elastic pool operation.

    All attributes are populated by the server and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar elastic_pool_name: The name of the elastic pool the operation is
     being performed on.
    :vartype elastic_pool_name: str
    :ivar operation: The name of operation.
    :vartype operation: str
    :ivar operation_friendly_name: The friendly name of operation.
    :vartype operation_friendly_name: str
    :ivar percent_complete: The percentage of the operation completed.
    :vartype percent_complete: int
    :ivar server_name: The name of the server.
    :vartype server_name: str
    :ivar start_time: The operation start time.
    :vartype start_time: datetime
    :ivar state: The operation state.
    :vartype state: str
    :ivar error_code: The operation error code.
    :vartype error_code: int
    :ivar error_description: The operation error description.
    :vartype error_description: str
    :ivar error_severity: The operation error severity.
    :vartype error_severity: int
    :ivar is_user_error: Whether or not the error is a user error.
    :vartype is_user_error: bool
    :ivar estimated_completion_time: The estimated completion time of the
     operation.
    :vartype estimated_completion_time: datetime
    :ivar description: The operation description.
    :vartype description: str
    :ivar is_cancellable: Whether the operation can be cancelled.
    :vartype is_cancellable: bool
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "elastic_pool_name": {"readonly": True},
        "operation": {"readonly": True},
        "operation_friendly_name": {"readonly": True},
        "percent_complete": {"readonly": True},
        "server_name": {"readonly": True},
        "start_time": {"readonly": True},
        "state": {"readonly": True},
        "error_code": {"readonly": True},
        "error_description": {"readonly": True},
        "error_severity": {"readonly": True},
        "is_user_error": {"readonly": True},
        "estimated_completion_time": {"readonly": True},
        "description": {"readonly": True},
        "is_cancellable": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "elastic_pool_name": {"key": "properties.elasticPoolName", "type": "str"},
        "operation": {"key": "properties.operation", "type": "str"},
        "operation_friendly_name": {"key": "properties.operationFriendlyName", "type": "str"},
        "percent_complete": {"key": "properties.percentComplete", "type": "int"},
        "server_name": {"key": "properties.serverName", "type": "str"},
        "start_time": {"key": "properties.startTime", "type": "iso-8601"},
        "state": {"key": "properties.state", "type": "str"},
        "error_code": {"key": "properties.errorCode", "type": "int"},
        "error_description": {"key": "properties.errorDescription", "type": "str"},
        "error_severity": {"key": "properties.errorSeverity", "type": "int"},
        "is_user_error": {"key": "properties.isUserError", "type": "bool"},
        "estimated_completion_time": {"key": "properties.estimatedCompletionTime", "type": "iso-8601"},
        "description": {"key": "properties.description", "type": "str"},
        "is_cancellable": {"key": "properties.isCancellable", "type": "bool"},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        # Every attribute is server-populated; initialize them all to None.
        for _readonly_attr in (
            "elastic_pool_name", "operation", "operation_friendly_name",
            "percent_complete", "server_name", "start_time", "state",
            "error_code", "error_description", "error_severity",
            "is_user_error", "estimated_completion_time", "description",
            "is_cancellable",
        ):
            setattr(self, _readonly_attr, None)
class ElasticPoolPerDatabaseMaxPerformanceLevelCapability(Model):
    """The max per-database performance level capability.

    All attributes except ``reason`` are populated by the server and will be
    ignored when sent in a request.

    :ivar limit: The maximum performance level per database.
    :vartype limit: float
    :ivar unit: Unit type used to measure performance level. Possible values
     include: 'DTU', 'VCores'
    :vartype unit: str or ~azure.mgmt.sql.models.PerformanceLevelUnit
    :ivar supported_per_database_min_performance_levels: The list of
     supported min database performance levels.
    :vartype supported_per_database_min_performance_levels:
     list[~azure.mgmt.sql.models.ElasticPoolPerDatabaseMinPerformanceLevelCapability]
    :ivar status: The status of the capability. Possible values include:
     'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: The reason for the capability not being available.
    :type reason: str
    """

    _validation = {
        "limit": {"readonly": True},
        "unit": {"readonly": True},
        "supported_per_database_min_performance_levels": {"readonly": True},
        "status": {"readonly": True},
    }

    _attribute_map = {
        "limit": {"key": "limit", "type": "float"},
        "unit": {"key": "unit", "type": "str"},
        "supported_per_database_min_performance_levels": {"key": "supportedPerDatabaseMinPerformanceLevels", "type": "[ElasticPoolPerDatabaseMinPerformanceLevelCapability]"},
        "status": {"key": "status", "type": "CapabilityStatus"},
        "reason": {"key": "reason", "type": "str"},
    }

    def __init__(self, *, reason: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # Read-only attributes start out unset; the server fills them in.
        self.limit = None
        self.unit = None
        self.supported_per_database_min_performance_levels = None
        self.status = None
        self.reason = reason
class ElasticPoolPerDatabaseMinPerformanceLevelCapability(Model):
    """The minimum per-database performance level capability.

    All attributes except ``reason`` are populated by the server and will be
    ignored when sent in a request.

    :ivar limit: The minimum performance level per database.
    :vartype limit: float
    :ivar unit: Unit type used to measure performance level. Possible values
     include: 'DTU', 'VCores'
    :vartype unit: str or ~azure.mgmt.sql.models.PerformanceLevelUnit
    :ivar status: The status of the capability. Possible values include:
     'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: The reason for the capability not being available.
    :type reason: str
    """

    _validation = {
        "limit": {"readonly": True},
        "unit": {"readonly": True},
        "status": {"readonly": True},
    }

    _attribute_map = {
        "limit": {"key": "limit", "type": "float"},
        "unit": {"key": "unit", "type": "str"},
        "status": {"key": "status", "type": "CapabilityStatus"},
        "reason": {"key": "reason", "type": "str"},
    }

    def __init__(self, *, reason: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # Read-only attributes start out unset; the server fills them in.
        self.limit = None
        self.unit = None
        self.status = None
        self.reason = reason
class ElasticPoolPerDatabaseSettings(Model):
    """Per database settings of an elastic pool.

    :param min_capacity: The minimum capacity all databases are guaranteed.
    :type min_capacity: float
    :param max_capacity: The maximum capacity any one database can consume.
    :type max_capacity: float
    """

    _attribute_map = {
        "min_capacity": {"key": "minCapacity", "type": "float"},
        "max_capacity": {"key": "maxCapacity", "type": "float"},
    }

    def __init__(self, *, min_capacity: float = None, max_capacity: float = None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.min_capacity = min_capacity
        self.max_capacity = max_capacity
class ElasticPoolPerformanceLevelCapability(Model):
    """The Elastic Pool performance level capability.

    All attributes except ``reason`` are populated by the server and will be
    ignored when sent in a request.

    :ivar performance_level: The performance level for the pool.
    :vartype performance_level:
     ~azure.mgmt.sql.models.PerformanceLevelCapability
    :ivar sku: The sku.
    :vartype sku: ~azure.mgmt.sql.models.Sku
    :ivar supported_license_types: List of supported license types.
    :vartype supported_license_types:
     list[~azure.mgmt.sql.models.LicenseTypeCapability]
    :ivar max_database_count: The maximum number of databases supported.
    :vartype max_database_count: int
    :ivar included_max_size: The included (free) max size for this
     performance level.
    :vartype included_max_size: ~azure.mgmt.sql.models.MaxSizeCapability
    :ivar supported_max_sizes: The list of supported max sizes.
    :vartype supported_max_sizes:
     list[~azure.mgmt.sql.models.MaxSizeRangeCapability]
    :ivar supported_per_database_max_sizes: The list of supported per
     database max sizes.
    :vartype supported_per_database_max_sizes:
     list[~azure.mgmt.sql.models.MaxSizeRangeCapability]
    :ivar supported_per_database_max_performance_levels: The list of
     supported per database max performance levels.
    :vartype supported_per_database_max_performance_levels:
     list[~azure.mgmt.sql.models.ElasticPoolPerDatabaseMaxPerformanceLevelCapability]
    :ivar status: The status of the capability. Possible values include:
     'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: The reason for the capability not being available.
    :type reason: str
    """

    _validation = {
        "performance_level": {"readonly": True},
        "sku": {"readonly": True},
        "supported_license_types": {"readonly": True},
        "max_database_count": {"readonly": True},
        "included_max_size": {"readonly": True},
        "supported_max_sizes": {"readonly": True},
        "supported_per_database_max_sizes": {"readonly": True},
        "supported_per_database_max_performance_levels": {"readonly": True},
        "status": {"readonly": True},
    }

    _attribute_map = {
        "performance_level": {"key": "performanceLevel", "type": "PerformanceLevelCapability"},
        "sku": {"key": "sku", "type": "Sku"},
        "supported_license_types": {"key": "supportedLicenseTypes", "type": "[LicenseTypeCapability]"},
        "max_database_count": {"key": "maxDatabaseCount", "type": "int"},
        "included_max_size": {"key": "includedMaxSize", "type": "MaxSizeCapability"},
        "supported_max_sizes": {"key": "supportedMaxSizes", "type": "[MaxSizeRangeCapability]"},
        "supported_per_database_max_sizes": {"key": "supportedPerDatabaseMaxSizes", "type": "[MaxSizeRangeCapability]"},
        "supported_per_database_max_performance_levels": {"key": "supportedPerDatabaseMaxPerformanceLevels", "type": "[ElasticPoolPerDatabaseMaxPerformanceLevelCapability]"},
        "status": {"key": "status", "type": "CapabilityStatus"},
        "reason": {"key": "reason", "type": "str"},
    }

    def __init__(self, *, reason: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # All server-populated attributes start out unset.
        for _readonly_attr in (
            "performance_level", "sku", "supported_license_types",
            "max_database_count", "included_max_size",
            "supported_max_sizes", "supported_per_database_max_sizes",
            "supported_per_database_max_performance_levels", "status",
        ):
            setattr(self, _readonly_attr, None)
        self.reason = reason
class ElasticPoolUpdate(Model):
    """An elastic pool update.

    Carries only the caller-settable fields of an elastic pool, for use as
    the body of an update (PATCH) request.

    :param sku:
    :type sku: ~azure.mgmt.sql.models.Sku
    :param max_size_bytes: The storage limit for the database elastic pool
     in bytes.
    :type max_size_bytes: long
    :param per_database_settings: The per database settings for the elastic
     pool.
    :type per_database_settings:
     ~azure.mgmt.sql.models.ElasticPoolPerDatabaseSettings
    :param zone_redundant: Whether or not this elastic pool is zone
     redundant, which means the replicas of this elastic pool will be spread
     across multiple availability zones.
    :type zone_redundant: bool
    :param license_type: The license type to apply for this elastic pool.
     Possible values include: 'LicenseIncluded', 'BasePrice'
    :type license_type: str or ~azure.mgmt.sql.models.ElasticPoolLicenseType
    :param tags: Resource tags.
    :type tags: dict[str, str]
    """

    _attribute_map = {
        "sku": {"key": "sku", "type": "Sku"},
        "max_size_bytes": {"key": "properties.maxSizeBytes", "type": "long"},
        "per_database_settings": {"key": "properties.perDatabaseSettings", "type": "ElasticPoolPerDatabaseSettings"},
        "zone_redundant": {"key": "properties.zoneRedundant", "type": "bool"},
        "license_type": {"key": "properties.licenseType", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
    }

    def __init__(self, *, sku=None, max_size_bytes: int = None, per_database_settings=None, zone_redundant: bool = None, license_type=None, tags=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.sku = sku
        self.max_size_bytes = max_size_bytes
        self.per_database_settings = per_database_settings
        self.zone_redundant = zone_redundant
        self.license_type = license_type
        self.tags = tags
class EncryptionProtector(ProxyResource):
    """The server encryption protector.

    Read-only attributes are populated by the server and will be ignored
    when sending a request. All required parameters must be populated in
    order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar kind: Kind of encryption protector. This is metadata used for the
     Azure portal experience.
    :vartype kind: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar subregion: Subregion of the encryption protector.
    :vartype subregion: str
    :param server_key_name: The name of the server key.
    :type server_key_name: str
    :param server_key_type: Required. The encryption protector type like
     'ServiceManaged', 'AzureKeyVault'. Possible values include:
     'ServiceManaged', 'AzureKeyVault'
    :type server_key_type: str or ~azure.mgmt.sql.models.ServerKeyType
    :ivar uri: The URI of the server key.
    :vartype uri: str
    :ivar thumbprint: Thumbprint of the server key.
    :vartype thumbprint: str
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "kind": {"readonly": True},
        "location": {"readonly": True},
        "subregion": {"readonly": True},
        "server_key_type": {"required": True},
        "uri": {"readonly": True},
        "thumbprint": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "kind": {"key": "kind", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "subregion": {"key": "properties.subregion", "type": "str"},
        "server_key_name": {"key": "properties.serverKeyName", "type": "str"},
        "server_key_type": {"key": "properties.serverKeyType", "type": "str"},
        "uri": {"key": "properties.uri", "type": "str"},
        "thumbprint": {"key": "properties.thumbprint", "type": "str"},
    }

    def __init__(self, *, server_key_type, server_key_name: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # Server-populated metadata.
        self.kind = None
        self.location = None
        self.subregion = None
        # Caller-supplied key identity.
        self.server_key_name = server_key_name
        self.server_key_type = server_key_type
        # Key details resolved by the service.
        self.uri = None
        self.thumbprint = None
class ExportRequest(Model):
    """Parameters describing a database export to Azure blob storage.

    All required parameters must be populated in order to send to Azure.

    :param storage_key_type: Required. Kind of storage key supplied.
     Possible values include: 'StorageAccessKey', 'SharedAccessKey'
    :type storage_key_type: str or ~azure.mgmt.sql.models.StorageKeyType
    :param storage_key: Required. The storage key to use. A
     SharedAccessKey must be preceded with a "?."
    :type storage_key: str
    :param storage_uri: Required. The destination storage uri.
    :type storage_uri: str
    :param administrator_login: Required. Name of the SQL administrator.
    :type administrator_login: str
    :param administrator_login_password: Required. Password of the SQL
     administrator.
    :type administrator_login_password: str
    :param authentication_type: The authentication type. Possible values
     include: 'SQL', 'ADPassword'. Default value: "SQL" .
    :type authentication_type: str or
     ~azure.mgmt.sql.models.AuthenticationType
    """

    # Every field except the authentication type (defaults to SQL auth)
    # is mandatory for an export request.
    _validation = {
        'storage_key_type': {'required': True},
        'storage_key': {'required': True},
        'storage_uri': {'required': True},
        'administrator_login': {'required': True},
        'administrator_login_password': {'required': True},
    }

    # Wire-format (JSON) key and serialization type for each attribute.
    _attribute_map = {
        'storage_key_type': {'key': 'storageKeyType', 'type': 'StorageKeyType'},
        'storage_key': {'key': 'storageKey', 'type': 'str'},
        'storage_uri': {'key': 'storageUri', 'type': 'str'},
        'administrator_login': {'key': 'administratorLogin', 'type': 'str'},
        'administrator_login_password': {'key': 'administratorLoginPassword', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'AuthenticationType'},
    }

    def __init__(self, *, storage_key_type, storage_key: str, storage_uri: str, administrator_login: str, administrator_login_password: str, authentication_type="SQL", **kwargs) -> None:
        super().__init__(**kwargs)
        # Storage destination.
        self.storage_key_type = storage_key_type
        self.storage_key = storage_key
        self.storage_uri = storage_uri
        # Credentials used to connect to the source database.
        self.administrator_login = administrator_login
        self.administrator_login_password = administrator_login_password
        self.authentication_type = authentication_type
class ExtendedDatabaseBlobAuditingPolicy(ProxyResource):
    """An extended database blob auditing policy.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param predicate_expression: Where-clause condition applied when
     creating an audit.
    :type predicate_expression: str
    :param state: Required. State of the policy ('Enabled' or 'Disabled').
     When Enabled, storageEndpoint or isAzureMonitorTargetEnabled is
     required.
    :type state: str or ~azure.mgmt.sql.models.BlobAuditingPolicyState
    :param storage_endpoint: Blob storage endpoint (e.g.
     https://MyAccount.blob.core.windows.net); required when state is
     Enabled.
    :type storage_endpoint: str
    :param storage_account_access_key: Identifier key of the auditing
     storage account; required when state is Enabled and storageEndpoint
     is specified.
    :type storage_account_access_key: str
    :param retention_days: Number of days to keep audit logs in the
     storage account.
    :type retention_days: int
    :param audit_actions_and_groups: Actions-Groups and Actions to audit.
     The recommended default combination is BATCH_COMPLETED_GROUP,
     SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP and
     FAILED_DATABASE_AUTHENTICATION_GROUP. For the full list of supported
     action groups and database-level actions (and the
     "{action} ON {object} BY {principal}" action form), see
     https://docs.microsoft.com/en-us/sql/relational-databases/security/auditing/sql-server-audit-action-groups-and-actions
    :type audit_actions_and_groups: list[str]
    :param storage_account_subscription_id: Blob storage subscription Id.
    :type storage_account_subscription_id: str
    :param is_storage_secondary_key_in_use: Whether
     storageAccountAccessKey value is the storage's secondary key.
    :type is_storage_secondary_key_in_use: bool
    :param is_azure_monitor_target_enabled: Whether audit events are sent
     to Azure Monitor. Requires 'state' set to 'Enabled' and, when
     configuring via REST, a Diagnostic Settings resource with the
     'SQLSecurityAuditEvents' logs category on the database. See the
     Diagnostic Settings REST API
     (https://go.microsoft.com/fwlink/?linkid=2033207) or PowerShell
     (https://go.microsoft.com/fwlink/?linkid=2033043) documentation.
    :type is_azure_monitor_target_enabled: bool
    """

    # Identity fields are server-populated; only 'state' must be supplied.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'state': {'required': True},
    }

    # Wire-format (JSON) key and serialization type for each attribute.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'predicate_expression': {'key': 'properties.predicateExpression', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'BlobAuditingPolicyState'},
        'storage_endpoint': {'key': 'properties.storageEndpoint', 'type': 'str'},
        'storage_account_access_key': {'key': 'properties.storageAccountAccessKey', 'type': 'str'},
        'retention_days': {'key': 'properties.retentionDays', 'type': 'int'},
        'audit_actions_and_groups': {'key': 'properties.auditActionsAndGroups', 'type': '[str]'},
        'storage_account_subscription_id': {'key': 'properties.storageAccountSubscriptionId', 'type': 'str'},
        'is_storage_secondary_key_in_use': {'key': 'properties.isStorageSecondaryKeyInUse', 'type': 'bool'},
        'is_azure_monitor_target_enabled': {'key': 'properties.isAzureMonitorTargetEnabled', 'type': 'bool'},
    }

    def __init__(self, *, state, predicate_expression: str=None, storage_endpoint: str=None, storage_account_access_key: str=None, retention_days: int=None, audit_actions_and_groups=None, storage_account_subscription_id: str=None, is_storage_secondary_key_in_use: bool=None, is_azure_monitor_target_enabled: bool=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.predicate_expression = predicate_expression
        self.state = state
        # Storage sink configuration.
        self.storage_endpoint = storage_endpoint
        self.storage_account_access_key = storage_account_access_key
        self.storage_account_subscription_id = storage_account_subscription_id
        self.is_storage_secondary_key_in_use = is_storage_secondary_key_in_use
        # Audit scope and retention.
        self.retention_days = retention_days
        self.audit_actions_and_groups = audit_actions_and_groups
        self.is_azure_monitor_target_enabled = is_azure_monitor_target_enabled
class ExtendedServerBlobAuditingPolicy(ProxyResource):
    """An extended server blob auditing policy.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param predicate_expression: Where-clause condition applied when
     creating an audit.
    :type predicate_expression: str
    :param state: Required. State of the policy ('Enabled' or 'Disabled').
     When Enabled, storageEndpoint or isAzureMonitorTargetEnabled is
     required.
    :type state: str or ~azure.mgmt.sql.models.BlobAuditingPolicyState
    :param storage_endpoint: Blob storage endpoint (e.g.
     https://MyAccount.blob.core.windows.net); required when state is
     Enabled.
    :type storage_endpoint: str
    :param storage_account_access_key: Identifier key of the auditing
     storage account; required when state is Enabled and storageEndpoint
     is specified.
    :type storage_account_access_key: str
    :param retention_days: Number of days to keep audit logs in the
     storage account.
    :type retention_days: int
    :param audit_actions_and_groups: Actions-Groups and Actions to audit.
     The recommended default combination is BATCH_COMPLETED_GROUP,
     SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP and
     FAILED_DATABASE_AUTHENTICATION_GROUP. Note that specific Actions
     cannot be specified for a server auditing policy. For the full list
     of supported action groups, see
     https://docs.microsoft.com/en-us/sql/relational-databases/security/auditing/sql-server-audit-action-groups-and-actions
    :type audit_actions_and_groups: list[str]
    :param storage_account_subscription_id: Blob storage subscription Id.
    :type storage_account_subscription_id: str
    :param is_storage_secondary_key_in_use: Whether
     storageAccountAccessKey value is the storage's secondary key.
    :type is_storage_secondary_key_in_use: bool
    :param is_azure_monitor_target_enabled: Whether audit events are sent
     to Azure Monitor. Requires 'state' set to 'Enabled' and, when
     configuring via REST, a Diagnostic Settings resource with the
     'SQLSecurityAuditEvents' logs category on the 'master' database. See
     the Diagnostic Settings REST API
     (https://go.microsoft.com/fwlink/?linkid=2033207) or PowerShell
     (https://go.microsoft.com/fwlink/?linkid=2033043) documentation.
    :type is_azure_monitor_target_enabled: bool
    """

    # Identity fields are server-populated; only 'state' must be supplied.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'state': {'required': True},
    }

    # Wire-format (JSON) key and serialization type for each attribute.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'predicate_expression': {'key': 'properties.predicateExpression', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'BlobAuditingPolicyState'},
        'storage_endpoint': {'key': 'properties.storageEndpoint', 'type': 'str'},
        'storage_account_access_key': {'key': 'properties.storageAccountAccessKey', 'type': 'str'},
        'retention_days': {'key': 'properties.retentionDays', 'type': 'int'},
        'audit_actions_and_groups': {'key': 'properties.auditActionsAndGroups', 'type': '[str]'},
        'storage_account_subscription_id': {'key': 'properties.storageAccountSubscriptionId', 'type': 'str'},
        'is_storage_secondary_key_in_use': {'key': 'properties.isStorageSecondaryKeyInUse', 'type': 'bool'},
        'is_azure_monitor_target_enabled': {'key': 'properties.isAzureMonitorTargetEnabled', 'type': 'bool'},
    }

    def __init__(self, *, state, predicate_expression: str=None, storage_endpoint: str=None, storage_account_access_key: str=None, retention_days: int=None, audit_actions_and_groups=None, storage_account_subscription_id: str=None, is_storage_secondary_key_in_use: bool=None, is_azure_monitor_target_enabled: bool=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.predicate_expression = predicate_expression
        self.state = state
        # Storage sink configuration.
        self.storage_endpoint = storage_endpoint
        self.storage_account_access_key = storage_account_access_key
        self.storage_account_subscription_id = storage_account_subscription_id
        self.is_storage_secondary_key_in_use = is_storage_secondary_key_in_use
        # Audit scope and retention.
        self.retention_days = retention_days
        self.audit_actions_and_groups = audit_actions_and_groups
        self.is_azure_monitor_target_enabled = is_azure_monitor_target_enabled
class FailoverGroup(ProxyResource):
    """A failover group.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param read_write_endpoint: Required. Read-write endpoint of the
     failover group instance.
    :type read_write_endpoint:
     ~azure.mgmt.sql.models.FailoverGroupReadWriteEndpoint
    :param read_only_endpoint: Read-only endpoint of the failover group
     instance.
    :type read_only_endpoint:
     ~azure.mgmt.sql.models.FailoverGroupReadOnlyEndpoint
    :ivar replication_role: Local replication role of the failover group
     instance. Possible values include: 'Primary', 'Secondary'
    :vartype replication_role: str or
     ~azure.mgmt.sql.models.FailoverGroupReplicationRole
    :ivar replication_state: Replication state of the failover group
     instance.
    :vartype replication_state: str
    :param partner_servers: Required. Partner server information for the
     failover group.
    :type partner_servers: list[~azure.mgmt.sql.models.PartnerInfo]
    :param databases: Databases in the failover group.
    :type databases: list[str]
    """

    # Identity, location and replication status are server-populated.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'readonly': True},
        'read_write_endpoint': {'required': True},
        'replication_role': {'readonly': True},
        'replication_state': {'readonly': True},
        'partner_servers': {'required': True},
    }

    # Wire-format (JSON) key and serialization type for each attribute.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'read_write_endpoint': {'key': 'properties.readWriteEndpoint', 'type': 'FailoverGroupReadWriteEndpoint'},
        'read_only_endpoint': {'key': 'properties.readOnlyEndpoint', 'type': 'FailoverGroupReadOnlyEndpoint'},
        'replication_role': {'key': 'properties.replicationRole', 'type': 'str'},
        'replication_state': {'key': 'properties.replicationState', 'type': 'str'},
        'partner_servers': {'key': 'properties.partnerServers', 'type': '[PartnerInfo]'},
        'databases': {'key': 'properties.databases', 'type': '[str]'},
    }

    def __init__(self, *, read_write_endpoint, partner_servers, tags=None, read_only_endpoint=None, databases=None, **kwargs) -> None:
        super().__init__(**kwargs)
        # Caller-supplied configuration.
        self.tags = tags
        self.read_write_endpoint = read_write_endpoint
        self.read_only_endpoint = read_only_endpoint
        self.partner_servers = partner_servers
        self.databases = databases
        # Read-only properties; the service fills these in on responses.
        self.location = None
        self.replication_role = None
        self.replication_state = None
class FailoverGroupReadOnlyEndpoint(Model):
    """Read-only endpoint of the failover group instance.

    :param failover_policy: Failover policy of the read-only endpoint for
     the failover group. Possible values include: 'Disabled', 'Enabled'
    :type failover_policy: str or
     ~azure.mgmt.sql.models.ReadOnlyEndpointFailoverPolicy
    """

    # Wire-format (JSON) key and serialization type for each attribute.
    _attribute_map = {
        'failover_policy': {'key': 'failoverPolicy', 'type': 'str'},
    }

    def __init__(self, *, failover_policy=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.failover_policy = failover_policy
class FailoverGroupReadWriteEndpoint(Model):
    """Read-write endpoint of the failover group instance.

    All required parameters must be populated in order to send to Azure.

    :param failover_policy: Required. Failover policy of the read-write
     endpoint ('Manual' or 'Automatic'). When Automatic,
     failoverWithDataLossGracePeriodMinutes is required.
    :type failover_policy: str or
     ~azure.mgmt.sql.models.ReadWriteEndpointFailoverPolicy
    :param failover_with_data_loss_grace_period_minutes: Grace period
     before failover with data loss is attempted for the read-write
     endpoint; required when failoverPolicy is Automatic.
    :type failover_with_data_loss_grace_period_minutes: int
    """

    _validation = {
        'failover_policy': {'required': True},
    }

    # Wire-format (JSON) key and serialization type for each attribute.
    _attribute_map = {
        'failover_policy': {'key': 'failoverPolicy', 'type': 'str'},
        'failover_with_data_loss_grace_period_minutes': {'key': 'failoverWithDataLossGracePeriodMinutes', 'type': 'int'},
    }

    def __init__(self, *, failover_policy, failover_with_data_loss_grace_period_minutes: int=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.failover_policy = failover_policy
        self.failover_with_data_loss_grace_period_minutes = failover_with_data_loss_grace_period_minutes
class FailoverGroupUpdate(Model):
    """A failover group update request.

    All fields are optional; only the supplied ones are patched.

    :param read_write_endpoint: Read-write endpoint of the failover group
     instance.
    :type read_write_endpoint:
     ~azure.mgmt.sql.models.FailoverGroupReadWriteEndpoint
    :param read_only_endpoint: Read-only endpoint of the failover group
     instance.
    :type read_only_endpoint:
     ~azure.mgmt.sql.models.FailoverGroupReadOnlyEndpoint
    :param databases: Databases in the failover group.
    :type databases: list[str]
    :param tags: Resource tags.
    :type tags: dict[str, str]
    """

    # Wire-format (JSON) key and serialization type for each attribute.
    _attribute_map = {
        'read_write_endpoint': {'key': 'properties.readWriteEndpoint', 'type': 'FailoverGroupReadWriteEndpoint'},
        'read_only_endpoint': {'key': 'properties.readOnlyEndpoint', 'type': 'FailoverGroupReadOnlyEndpoint'},
        'databases': {'key': 'properties.databases', 'type': '[str]'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, *, read_write_endpoint=None, read_only_endpoint=None, databases=None, tags=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.read_write_endpoint = read_write_endpoint
        self.read_only_endpoint = read_only_endpoint
        self.databases = databases
        self.tags = tags
class FirewallRule(ProxyResource):
    """Represents a server firewall rule.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar kind: Kind of server that contains this firewall rule.
    :vartype kind: str
    :ivar location: Location of the server that contains this firewall
     rule.
    :vartype location: str
    :param start_ip_address: Required. Start IP address of the firewall
     rule, in IPv4 format. '0.0.0.0' represents all Azure-internal IP
     addresses.
    :type start_ip_address: str
    :param end_ip_address: Required. End IP address of the firewall rule,
     in IPv4 format; must be >= startIpAddress. '0.0.0.0' represents all
     Azure-internal IP addresses.
    :type end_ip_address: str
    """

    # Identity fields are server-populated; both IP bounds are mandatory.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'kind': {'readonly': True},
        'location': {'readonly': True},
        'start_ip_address': {'required': True},
        'end_ip_address': {'required': True},
    }

    # Wire-format (JSON) key and serialization type for each attribute.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'start_ip_address': {'key': 'properties.startIpAddress', 'type': 'str'},
        'end_ip_address': {'key': 'properties.endIpAddress', 'type': 'str'},
    }

    def __init__(self, *, start_ip_address: str, end_ip_address: str, **kwargs) -> None:
        super().__init__(**kwargs)
        # Caller-supplied IPv4 range.
        self.start_ip_address = start_ip_address
        self.end_ip_address = end_ip_address
        # Read-only properties; the service fills these in on responses.
        self.kind = None
        self.location = None
class GeoBackupPolicy(ProxyResource):
    """A database geo backup policy.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param state: Required. State of the geo backup policy ('Disabled' or
     'Enabled').
    :type state: str or ~azure.mgmt.sql.models.GeoBackupPolicyState
    :ivar storage_type: Storage type of the geo backup policy.
    :vartype storage_type: str
    :ivar kind: Kind of geo backup policy; metadata used for the Azure
     portal experience.
    :vartype kind: str
    :ivar location: Backup policy location.
    :vartype location: str
    """

    # Only 'state' is writable; everything else is server-populated.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'state': {'required': True},
        'storage_type': {'readonly': True},
        'kind': {'readonly': True},
        'location': {'readonly': True},
    }

    # Wire-format (JSON) key and serialization type for each attribute.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'GeoBackupPolicyState'},
        'storage_type': {'key': 'properties.storageType', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
    }

    def __init__(self, *, state, **kwargs) -> None:
        super().__init__(**kwargs)
        self.state = state
        # Read-only properties; the service fills these in on responses.
        self.storage_type = None
        self.kind = None
        self.location = None
class ImportExportResponse(ProxyResource):
    """Response for Import/Export Get operation.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar request_type: The request type of the operation.
    :vartype request_type: str
    :ivar request_id: The request id of the operation.
    :vartype request_id: str
    :ivar server_name: Name of the server.
    :vartype server_name: str
    :ivar database_name: Name of the database.
    :vartype database_name: str
    :ivar status: Status message returned from the server.
    :vartype status: str
    :ivar last_modified_time: Last modified time of the operation status.
    :vartype last_modified_time: str
    :ivar queued_time: Time the operation was queued.
    :vartype queued_time: str
    :ivar blob_uri: The blob uri.
    :vartype blob_uri: str
    :ivar error_message: Error message returned from the server.
    :vartype error_message: str
    """

    # This model is entirely server-populated: every field is read-only.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'request_type': {'readonly': True},
        'request_id': {'readonly': True},
        'server_name': {'readonly': True},
        'database_name': {'readonly': True},
        'status': {'readonly': True},
        'last_modified_time': {'readonly': True},
        'queued_time': {'readonly': True},
        'blob_uri': {'readonly': True},
        'error_message': {'readonly': True},
    }

    # Wire-format (JSON) key and serialization type for each attribute.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'request_type': {'key': 'properties.requestType', 'type': 'str'},
        'request_id': {'key': 'properties.requestId', 'type': 'str'},
        'server_name': {'key': 'properties.serverName', 'type': 'str'},
        'database_name': {'key': 'properties.databaseName', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'str'},
        'last_modified_time': {'key': 'properties.lastModifiedTime', 'type': 'str'},
        'queued_time': {'key': 'properties.queuedTime', 'type': 'str'},
        'blob_uri': {'key': 'properties.blobUri', 'type': 'str'},
        'error_message': {'key': 'properties.errorMessage', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        # All properties are read-only; the deserializer fills them in.
        self.request_type = None
        self.request_id = None
        self.server_name = None
        self.database_name = None
        self.status = None
        self.last_modified_time = None
        self.queued_time = None
        self.blob_uri = None
        self.error_message = None
class ImportExtensionRequest(Model):
    """Import database parameters.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :param name: Name of the extension.
    :type name: str
    :param type: Type of the extension.
    :type type: str
    :param storage_key_type: Required. Kind of storage key supplied.
     Possible values include: 'StorageAccessKey', 'SharedAccessKey'
    :type storage_key_type: str or ~azure.mgmt.sql.models.StorageKeyType
    :param storage_key: Required. The storage key to use. A
     SharedAccessKey must be preceded with a "?."
    :type storage_key: str
    :param storage_uri: Required. The source storage uri.
    :type storage_uri: str
    :param administrator_login: Required. Name of the SQL administrator.
    :type administrator_login: str
    :param administrator_login_password: Required. Password of the SQL
     administrator.
    :type administrator_login_password: str
    :param authentication_type: The authentication type. Possible values
     include: 'SQL', 'ADPassword'. Default value: "SQL" .
    :type authentication_type: str or
     ~azure.mgmt.sql.models.AuthenticationType
    :ivar operation_mode: Required. Type of import operation being
     performed; always "Import".
    :vartype operation_mode: str
    """

    # operation_mode is a service-defined constant; the rest mirror the
    # export request requirements.
    _validation = {
        'storage_key_type': {'required': True},
        'storage_key': {'required': True},
        'storage_uri': {'required': True},
        'administrator_login': {'required': True},
        'administrator_login_password': {'required': True},
        'operation_mode': {'required': True, 'constant': True},
    }

    # Wire-format (JSON) key and serialization type for each attribute.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'storage_key_type': {'key': 'properties.storageKeyType', 'type': 'StorageKeyType'},
        'storage_key': {'key': 'properties.storageKey', 'type': 'str'},
        'storage_uri': {'key': 'properties.storageUri', 'type': 'str'},
        'administrator_login': {'key': 'properties.administratorLogin', 'type': 'str'},
        'administrator_login_password': {'key': 'properties.administratorLoginPassword', 'type': 'str'},
        'authentication_type': {'key': 'properties.authenticationType', 'type': 'AuthenticationType'},
        'operation_mode': {'key': 'properties.operationMode', 'type': 'str'},
    }

    # Constant class attribute: this request always performs an import.
    operation_mode = "Import"

    def __init__(self, *, storage_key_type, storage_key: str, storage_uri: str, administrator_login: str, administrator_login_password: str, name: str=None, type: str=None, authentication_type="SQL", **kwargs) -> None:
        super().__init__(**kwargs)
        # Extension identity.
        self.name = name
        self.type = type
        # Storage source.
        self.storage_key_type = storage_key_type
        self.storage_key = storage_key
        self.storage_uri = storage_uri
        # Credentials used to connect to the target server.
        self.administrator_login = administrator_login
        self.administrator_login_password = administrator_login_password
        self.authentication_type = authentication_type
class ImportRequest(ExportRequest):
    """Import database parameters.

    All required parameters must be populated in order to send to Azure.

    :param storage_key_type: Required. The type of the storage key to use.
     Possible values include: 'StorageAccessKey', 'SharedAccessKey'
    :type storage_key_type: str or ~azure.mgmt.sql.models.StorageKeyType
    :param storage_key: Required. The storage key to use. If storage key type
     is SharedAccessKey, it must be preceded with a "?."
    :type storage_key: str
    :param storage_uri: Required. The storage uri to use.
    :type storage_uri: str
    :param administrator_login: Required. The name of the SQL administrator.
    :type administrator_login: str
    :param administrator_login_password: Required. The password of the SQL
     administrator.
    :type administrator_login_password: str
    :param authentication_type: The authentication type. Possible values
     include: 'SQL', 'ADPassword'. Default value: "SQL" .
    :type authentication_type: str or
     ~azure.mgmt.sql.models.AuthenticationType
    :param database_name: Required. The name of the database to import.
    :type database_name: str
    :param edition: Required. The edition for the database being created.
     The list of SKUs may vary by region and support offer. To determine the
     SKUs (including the SKU name, tier/edition, family, and capacity) that are
     available to your subscription in an Azure region, use the
     `Capabilities_ListByLocation` REST API or one of the following commands:
     ```azurecli
     az sql db list-editions -l <location> -o table
     ```
     ```powershell
     Get-AzSqlServerServiceObjective -Location <location>
     ```
     . Possible values include: 'Web', 'Business', 'Basic', 'Standard',
     'Premium', 'PremiumRS', 'Free', 'Stretch', 'DataWarehouse', 'System',
     'System2', 'GeneralPurpose', 'BusinessCritical', 'Hyperscale'
    :type edition: str or ~azure.mgmt.sql.models.DatabaseEdition
    :param service_objective_name: Required. The name of the service objective
     to assign to the database. Possible values include: 'System', 'System0',
     'System1', 'System2', 'System3', 'System4', 'System2L', 'System3L',
     'System4L', 'Free', 'Basic', 'S0', 'S1', 'S2', 'S3', 'S4', 'S6', 'S7',
     'S9', 'S12', 'P1', 'P2', 'P3', 'P4', 'P6', 'P11', 'P15', 'PRS1', 'PRS2',
     'PRS4', 'PRS6', 'DW100', 'DW200', 'DW300', 'DW400', 'DW500', 'DW600',
     'DW1000', 'DW1200', 'DW1000c', 'DW1500', 'DW1500c', 'DW2000', 'DW2000c',
     'DW3000', 'DW2500c', 'DW3000c', 'DW6000', 'DW5000c', 'DW6000c', 'DW7500c',
     'DW10000c', 'DW15000c', 'DW30000c', 'DS100', 'DS200', 'DS300', 'DS400',
     'DS500', 'DS600', 'DS1000', 'DS1200', 'DS1500', 'DS2000', 'ElasticPool'
    :type service_objective_name: str or
     ~azure.mgmt.sql.models.ServiceObjectiveName
    :param max_size_bytes: Required. The maximum size for the newly imported
     database.
    :type max_size_bytes: str
    """

    # Fields the (msrest) serializer requires to be populated before sending.
    _validation = {
        'storage_key_type': {'required': True},
        'storage_key': {'required': True},
        'storage_uri': {'required': True},
        'administrator_login': {'required': True},
        'administrator_login_password': {'required': True},
        'database_name': {'required': True},
        'edition': {'required': True},
        'service_objective_name': {'required': True},
        'max_size_bytes': {'required': True},
    }

    # Attribute name -> wire-format key and msrest type string; drives
    # (de)serialization of this model. Keys here are top-level (no
    # 'properties.' prefix), unlike the ARM proxy-resource models below.
    _attribute_map = {
        'storage_key_type': {'key': 'storageKeyType', 'type': 'StorageKeyType'},
        'storage_key': {'key': 'storageKey', 'type': 'str'},
        'storage_uri': {'key': 'storageUri', 'type': 'str'},
        'administrator_login': {'key': 'administratorLogin', 'type': 'str'},
        'administrator_login_password': {'key': 'administratorLoginPassword', 'type': 'str'},
        'authentication_type': {'key': 'authenticationType', 'type': 'AuthenticationType'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
        'edition': {'key': 'edition', 'type': 'str'},
        'service_objective_name': {'key': 'serviceObjectiveName', 'type': 'str'},
        'max_size_bytes': {'key': 'maxSizeBytes', 'type': 'str'},
    }

    def __init__(self, *, storage_key_type, storage_key: str, storage_uri: str, administrator_login: str, administrator_login_password: str, database_name: str, edition, service_objective_name, max_size_bytes: str, authentication_type="SQL", **kwargs) -> None:
        # Storage/credential fields are handled by the ExportRequest base;
        # only the import-specific fields are stored here.
        super(ImportRequest, self).__init__(storage_key_type=storage_key_type, storage_key=storage_key, storage_uri=storage_uri, administrator_login=administrator_login, administrator_login_password=administrator_login_password, authentication_type=authentication_type, **kwargs)
        self.database_name = database_name
        self.edition = edition
        self.service_objective_name = service_objective_name
        self.max_size_bytes = max_size_bytes
class InstanceFailoverGroup(ProxyResource):
    """An instance failover group.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param read_write_endpoint: Required. Read-write endpoint of the failover
     group instance.
    :type read_write_endpoint:
     ~azure.mgmt.sql.models.InstanceFailoverGroupReadWriteEndpoint
    :param read_only_endpoint: Read-only endpoint of the failover group
     instance.
    :type read_only_endpoint:
     ~azure.mgmt.sql.models.InstanceFailoverGroupReadOnlyEndpoint
    :ivar replication_role: Local replication role of the failover group
     instance. Possible values include: 'Primary', 'Secondary'
    :vartype replication_role: str or
     ~azure.mgmt.sql.models.InstanceFailoverGroupReplicationRole
    :ivar replication_state: Replication state of the failover group instance.
    :vartype replication_state: str
    :param partner_regions: Required. Partner region information for the
     failover group.
    :type partner_regions: list[~azure.mgmt.sql.models.PartnerRegionInfo]
    :param managed_instance_pairs: Required. List of managed instance pairs in
     the failover group.
    :type managed_instance_pairs:
     list[~azure.mgmt.sql.models.ManagedInstancePairInfo]
    """

    # 'readonly' fields are server-populated; 'required' fields must be set
    # by the caller before the request is serialized.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'read_write_endpoint': {'required': True},
        'replication_role': {'readonly': True},
        'replication_state': {'readonly': True},
        'partner_regions': {'required': True},
        'managed_instance_pairs': {'required': True},
    }

    # Attribute name -> wire-format key ('properties.*' = nested under the ARM
    # resource properties envelope) and msrest type string.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'read_write_endpoint': {'key': 'properties.readWriteEndpoint', 'type': 'InstanceFailoverGroupReadWriteEndpoint'},
        'read_only_endpoint': {'key': 'properties.readOnlyEndpoint', 'type': 'InstanceFailoverGroupReadOnlyEndpoint'},
        'replication_role': {'key': 'properties.replicationRole', 'type': 'str'},
        'replication_state': {'key': 'properties.replicationState', 'type': 'str'},
        'partner_regions': {'key': 'properties.partnerRegions', 'type': '[PartnerRegionInfo]'},
        'managed_instance_pairs': {'key': 'properties.managedInstancePairs', 'type': '[ManagedInstancePairInfo]'},
    }

    def __init__(self, *, read_write_endpoint, partner_regions, managed_instance_pairs, read_only_endpoint=None, **kwargs) -> None:
        super(InstanceFailoverGroup, self).__init__(**kwargs)
        self.read_write_endpoint = read_write_endpoint
        self.read_only_endpoint = read_only_endpoint
        # Server-populated fields; always None on the client side.
        self.replication_role = None
        self.replication_state = None
        self.partner_regions = partner_regions
        self.managed_instance_pairs = managed_instance_pairs
class InstanceFailoverGroupReadOnlyEndpoint(Model):
    """Describes the read-only endpoint of a failover group instance.

    :param failover_policy: Failover policy of the read-only endpoint for the
     failover group. Possible values include: 'Disabled', 'Enabled'
    :type failover_policy: str or
     ~azure.mgmt.sql.models.ReadOnlyEndpointFailoverPolicy
    """

    # Attribute name -> wire-format key and msrest type string.
    _attribute_map = {
        'failover_policy': {'key': 'failoverPolicy', 'type': 'str'},
    }

    def __init__(self, *, failover_policy=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.failover_policy = failover_policy
class InstanceFailoverGroupReadWriteEndpoint(Model):
    """Read-write endpoint of the failover group instance.

    All required parameters must be populated in order to send to Azure.

    :param failover_policy: Required. Failover policy of the read-write
     endpoint for the failover group. If failoverPolicy is Automatic then
     failoverWithDataLossGracePeriodMinutes is required. Possible values
     include: 'Manual', 'Automatic'
    :type failover_policy: str or
     ~azure.mgmt.sql.models.ReadWriteEndpointFailoverPolicy
    :param failover_with_data_loss_grace_period_minutes: Grace period before
     failover with data loss is attempted for the read-write endpoint. If
     failoverPolicy is Automatic then failoverWithDataLossGracePeriodMinutes is
     required.
    :type failover_with_data_loss_grace_period_minutes: int
    """

    # Required by the serializer. NOTE(review): the conditional requirement of
    # failover_with_data_loss_grace_period_minutes (when policy is Automatic)
    # is only enforced service-side, not here.
    _validation = {
        'failover_policy': {'required': True},
    }

    # Attribute name -> wire-format key and msrest type string.
    _attribute_map = {
        'failover_policy': {'key': 'failoverPolicy', 'type': 'str'},
        'failover_with_data_loss_grace_period_minutes': {'key': 'failoverWithDataLossGracePeriodMinutes', 'type': 'int'},
    }

    def __init__(self, *, failover_policy, failover_with_data_loss_grace_period_minutes: int=None, **kwargs) -> None:
        super(InstanceFailoverGroupReadWriteEndpoint, self).__init__(**kwargs)
        self.failover_policy = failover_policy
        self.failover_with_data_loss_grace_period_minutes = failover_with_data_loss_grace_period_minutes
class InstancePool(TrackedResource):
    """An Azure SQL instance pool.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: Required. Resource location.
    :type location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param sku: The name and tier of the SKU.
    :type sku: ~azure.mgmt.sql.models.Sku
    :param subnet_id: Required. Resource ID of the subnet to place this
     instance pool in.
    :type subnet_id: str
    :param v_cores: Required. Count of vCores belonging to this instance pool.
    :type v_cores: int
    :param license_type: Required. The license type. Possible values are
     'LicenseIncluded' (price for SQL license is included) and 'BasePrice'
     (without SQL license price). Possible values include: 'LicenseIncluded',
     'BasePrice'
    :type license_type: str or ~azure.mgmt.sql.models.InstancePoolLicenseType
    """

    # 'readonly' fields are server-populated; 'required' fields must be set
    # by the caller before the request is serialized.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
        'subnet_id': {'required': True},
        'v_cores': {'required': True},
        'license_type': {'required': True},
    }

    # Attribute name -> wire-format key ('properties.*' = nested under the ARM
    # resource properties envelope) and msrest type string.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'sku': {'key': 'sku', 'type': 'Sku'},
        'subnet_id': {'key': 'properties.subnetId', 'type': 'str'},
        'v_cores': {'key': 'properties.vCores', 'type': 'int'},
        'license_type': {'key': 'properties.licenseType', 'type': 'str'},
    }

    def __init__(self, *, location: str, subnet_id: str, v_cores: int, license_type, tags=None, sku=None, **kwargs) -> None:
        # location/tags are handled by the TrackedResource base.
        super(InstancePool, self).__init__(location=location, tags=tags, **kwargs)
        self.sku = sku
        self.subnet_id = subnet_id
        self.v_cores = v_cores
        self.license_type = license_type
class InstancePoolUpdate(Model):
    """Request body for updating an instance pool; only tags can be changed.

    :param tags: Resource tags.
    :type tags: dict[str, str]
    """

    # Attribute name -> wire-format key and msrest type string.
    _attribute_map = {
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, *, tags=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.tags = tags
class Job(ProxyResource):
    """A job.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param description: User-defined description of the job. Default value: ""
     .
    :type description: str
    :ivar version: The job version number.
    :vartype version: int
    :param schedule: Schedule properties of the job.
    :type schedule: ~azure.mgmt.sql.models.JobSchedule
    """

    # All 'readonly' fields are populated by the server on responses.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'version': {'readonly': True},
    }

    # Attribute name -> wire-format key ('properties.*' = nested under the ARM
    # resource properties envelope) and msrest type string.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'version': {'key': 'properties.version', 'type': 'int'},
        'schedule': {'key': 'properties.schedule', 'type': 'JobSchedule'},
    }

    def __init__(self, *, description: str="", schedule=None, **kwargs) -> None:
        super(Job, self).__init__(**kwargs)
        self.description = description
        # Server-populated; always None on the client side.
        self.version = None
        self.schedule = schedule
class JobAgent(TrackedResource):
    """An Azure SQL job agent.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: Required. Resource location.
    :type location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param sku: The name and tier of the SKU.
    :type sku: ~azure.mgmt.sql.models.Sku
    :param database_id: Required. Resource ID of the database to store job
     metadata in.
    :type database_id: str
    :ivar state: The state of the job agent. Possible values include:
     'Creating', 'Ready', 'Updating', 'Deleting', 'Disabled'
    :vartype state: str or ~azure.mgmt.sql.models.JobAgentState
    """

    # 'readonly' fields are server-populated; 'required' fields must be set
    # by the caller before the request is serialized.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
        'database_id': {'required': True},
        'state': {'readonly': True},
    }

    # Attribute name -> wire-format key ('properties.*' = nested under the ARM
    # resource properties envelope) and msrest type string.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'sku': {'key': 'sku', 'type': 'Sku'},
        'database_id': {'key': 'properties.databaseId', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'str'},
    }

    def __init__(self, *, location: str, database_id: str, tags=None, sku=None, **kwargs) -> None:
        # location/tags are handled by the TrackedResource base.
        super(JobAgent, self).__init__(location=location, tags=tags, **kwargs)
        self.sku = sku
        self.database_id = database_id
        # Server-populated; always None on the client side.
        self.state = None
class JobAgentUpdate(Model):
    """Request body for updating a job agent; only tags can be changed.

    :param tags: Resource tags.
    :type tags: dict[str, str]
    """

    # Attribute name -> wire-format key and msrest type string.
    _attribute_map = {
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, *, tags=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.tags = tags
class JobCredential(ProxyResource):
    """A stored credential that can be used by a job to connect to target
    databases.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param username: Required. The credential user name.
    :type username: str
    :param password: Required. The credential password.
    :type password: str
    """

    # 'readonly' fields are server-populated; 'required' fields must be set
    # by the caller before the request is serialized.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'username': {'required': True},
        'password': {'required': True},
    }

    # Attribute name -> wire-format key ('properties.*' = nested under the ARM
    # resource properties envelope) and msrest type string.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'username': {'key': 'properties.username', 'type': 'str'},
        'password': {'key': 'properties.password', 'type': 'str'},
    }

    def __init__(self, *, username: str, password: str, **kwargs) -> None:
        super(JobCredential, self).__init__(**kwargs)
        self.username = username
        # NOTE(review): password is held in plain text on the model instance,
        # as generated; avoid logging/repr-ing these objects.
        self.password = password
class JobExecution(ProxyResource):
    """An execution of a job.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar job_version: The job version number.
    :vartype job_version: int
    :ivar step_name: The job step name.
    :vartype step_name: str
    :ivar step_id: The job step id.
    :vartype step_id: int
    :ivar job_execution_id: The unique identifier of the job execution.
    :vartype job_execution_id: str
    :ivar lifecycle: The detailed state of the job execution. Possible values
     include: 'Created', 'InProgress', 'WaitingForChildJobExecutions',
     'WaitingForRetry', 'Succeeded', 'SucceededWithSkipped', 'Failed',
     'TimedOut', 'Canceled', 'Skipped'
    :vartype lifecycle: str or ~azure.mgmt.sql.models.JobExecutionLifecycle
    :ivar provisioning_state: The ARM provisioning state of the job execution.
     Possible values include: 'Created', 'InProgress', 'Succeeded', 'Failed',
     'Canceled'
    :vartype provisioning_state: str or
     ~azure.mgmt.sql.models.ProvisioningState
    :ivar create_time: The time that the job execution was created.
    :vartype create_time: datetime
    :ivar start_time: The time that the job execution started.
    :vartype start_time: datetime
    :ivar end_time: The time that the job execution completed.
    :vartype end_time: datetime
    :param current_attempts: Number of times the job execution has been
     attempted.
    :type current_attempts: int
    :ivar current_attempt_start_time: Start time of the current attempt.
    :vartype current_attempt_start_time: datetime
    :ivar last_message: The last status or error message.
    :vartype last_message: str
    :ivar target: The target that this execution is executed on.
    :vartype target: ~azure.mgmt.sql.models.JobExecutionTarget
    """

    # Every field except current_attempts is server-populated (readonly).
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'job_version': {'readonly': True},
        'step_name': {'readonly': True},
        'step_id': {'readonly': True},
        'job_execution_id': {'readonly': True},
        'lifecycle': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'create_time': {'readonly': True},
        'start_time': {'readonly': True},
        'end_time': {'readonly': True},
        'current_attempt_start_time': {'readonly': True},
        'last_message': {'readonly': True},
        'target': {'readonly': True},
    }

    # Attribute name -> wire-format key ('properties.*' = nested under the ARM
    # resource properties envelope) and msrest type string; 'iso-8601' values
    # are deserialized to datetime objects.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'job_version': {'key': 'properties.jobVersion', 'type': 'int'},
        'step_name': {'key': 'properties.stepName', 'type': 'str'},
        'step_id': {'key': 'properties.stepId', 'type': 'int'},
        'job_execution_id': {'key': 'properties.jobExecutionId', 'type': 'str'},
        'lifecycle': {'key': 'properties.lifecycle', 'type': 'str'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'},
        'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'properties.endTime', 'type': 'iso-8601'},
        'current_attempts': {'key': 'properties.currentAttempts', 'type': 'int'},
        'current_attempt_start_time': {'key': 'properties.currentAttemptStartTime', 'type': 'iso-8601'},
        'last_message': {'key': 'properties.lastMessage', 'type': 'str'},
        'target': {'key': 'properties.target', 'type': 'JobExecutionTarget'},
    }

    def __init__(self, *, current_attempts: int=None, **kwargs) -> None:
        super(JobExecution, self).__init__(**kwargs)
        # All fields below except current_attempts are server-populated and
        # therefore initialized to None on the client side.
        self.job_version = None
        self.step_name = None
        self.step_id = None
        self.job_execution_id = None
        self.lifecycle = None
        self.provisioning_state = None
        self.create_time = None
        self.start_time = None
        self.end_time = None
        self.current_attempts = current_attempts
        self.current_attempt_start_time = None
        self.last_message = None
        self.target = None
class JobExecutionTarget(Model):
    """The target that a job execution is executed on.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar type: The type of the target. Possible values include:
     'TargetGroup', 'SqlDatabase', 'SqlElasticPool', 'SqlShardMap', 'SqlServer'
    :vartype type: str or ~azure.mgmt.sql.models.JobTargetType
    :ivar server_name: The server name.
    :vartype server_name: str
    :ivar database_name: The database name.
    :vartype database_name: str
    """

    # Entirely server-populated; this model only ever appears in responses.
    _validation = {
        'type': {'readonly': True},
        'server_name': {'readonly': True},
        'database_name': {'readonly': True},
    }

    # Attribute name -> wire-format key and msrest type string.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'server_name': {'key': 'serverName', 'type': 'str'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super(JobExecutionTarget, self).__init__(**kwargs)
        # Server-populated; always None on the client side.
        self.type = None
        self.server_name = None
        self.database_name = None
class JobSchedule(Model):
    """Scheduling properties of a job.

    :param start_time: Schedule start time. Default value:
     "0001-01-01T00:00:00Z" .
    :type start_time: datetime
    :param end_time: Schedule end time. Default value: "9999-12-31T11:59:59Z"
     .
    :type end_time: datetime
    :param type: Schedule interval type. Possible values include: 'Once',
     'Recurring'. Default value: "Once" .
    :type type: str or ~azure.mgmt.sql.models.JobScheduleType
    :param enabled: Whether or not the schedule is enabled.
    :type enabled: bool
    :param interval: Value of the schedule's recurring interval, if the
     schedule type is recurring. ISO8601 duration format.
    :type interval: str
    """

    # Attribute name -> wire-format key and msrest type string.
    # NOTE(review): 'type' uses the enum type string 'JobScheduleType' while
    # sibling models serialize enum-valued fields as 'str'; generated as-is,
    # left unchanged.
    _attribute_map = {
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'type': {'key': 'type', 'type': 'JobScheduleType'},
        'enabled': {'key': 'enabled', 'type': 'bool'},
        'interval': {'key': 'interval', 'type': 'str'},
    }

    def __init__(self, *, start_time="0001-01-01T00:00:00Z", end_time="9999-12-31T11:59:59Z", type="Once", enabled: bool=None, interval: str=None, **kwargs) -> None:
        # Defaults are ISO-8601 string sentinels covering "always active";
        # callers may pass datetime objects instead.
        super(JobSchedule, self).__init__(**kwargs)
        self.start_time = start_time
        self.end_time = end_time
        self.type = type
        self.enabled = enabled
        self.interval = interval
class JobStep(ProxyResource):
    """A job step.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param step_id: The job step's index within the job. If not specified when
     creating the job step, it will be created as the last step. If not
     specified when updating the job step, the step id is not modified.
    :type step_id: int
    :param target_group: Required. The resource ID of the target group that
     the job step will be executed on.
    :type target_group: str
    :param credential: Required. The resource ID of the job credential that
     will be used to connect to the targets.
    :type credential: str
    :param action: Required. The action payload of the job step.
    :type action: ~azure.mgmt.sql.models.JobStepAction
    :param output: Output destination properties of the job step.
    :type output: ~azure.mgmt.sql.models.JobStepOutput
    :param execution_options: Execution options for the job step.
    :type execution_options: ~azure.mgmt.sql.models.JobStepExecutionOptions
    """

    # 'readonly' fields are server-populated; 'required' fields must be set
    # by the caller before the request is serialized.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'target_group': {'required': True},
        'credential': {'required': True},
        'action': {'required': True},
    }

    # Attribute name -> wire-format key ('properties.*' = nested under the ARM
    # resource properties envelope) and msrest type string.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'step_id': {'key': 'properties.stepId', 'type': 'int'},
        'target_group': {'key': 'properties.targetGroup', 'type': 'str'},
        'credential': {'key': 'properties.credential', 'type': 'str'},
        'action': {'key': 'properties.action', 'type': 'JobStepAction'},
        'output': {'key': 'properties.output', 'type': 'JobStepOutput'},
        'execution_options': {'key': 'properties.executionOptions', 'type': 'JobStepExecutionOptions'},
    }

    def __init__(self, *, target_group: str, credential: str, action, step_id: int=None, output=None, execution_options=None, **kwargs) -> None:
        super(JobStep, self).__init__(**kwargs)
        self.step_id = step_id
        self.target_group = target_group
        self.credential = credential
        self.action = action
        self.output = output
        self.execution_options = execution_options
class JobStepAction(Model):
    """The action to be executed by a job step.

    All required parameters must be populated in order to send to Azure.

    :param type: Type of action being executed by the job step. Possible
     values include: 'TSql'. Default value: "TSql" .
    :type type: str or ~azure.mgmt.sql.models.JobStepActionType
    :param source: The source of the action to execute. Possible values
     include: 'Inline'. Default value: "Inline" .
    :type source: str or ~azure.mgmt.sql.models.JobStepActionSource
    :param value: Required. The action value, for example the text of the
     T-SQL script to execute.
    :type value: str
    """

    # Only the action payload itself is mandatory; type/source have defaults.
    _validation = {
        'value': {'required': True},
    }

    # Attribute name -> wire-format key and msrest type string.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'source': {'key': 'source', 'type': 'str'},
        'value': {'key': 'value', 'type': 'str'},
    }

    def __init__(self, *, value: str, type="TSql", source="Inline", **kwargs) -> None:
        super(JobStepAction, self).__init__(**kwargs)
        self.type = type
        self.source = source
        self.value = value
class JobStepExecutionOptions(Model):
    """The execution options of a job step.

    :param timeout_seconds: Execution timeout for the job step. Default value:
     43200 .
    :type timeout_seconds: int
    :param retry_attempts: Maximum number of times the job step will be
     reattempted if the first attempt fails. Default value: 10 .
    :type retry_attempts: int
    :param initial_retry_interval_seconds: Initial delay between retries for
     job step execution. Default value: 1 .
    :type initial_retry_interval_seconds: int
    :param maximum_retry_interval_seconds: The maximum amount of time to wait
     between retries for job step execution. Default value: 120 .
    :type maximum_retry_interval_seconds: int
    :param retry_interval_backoff_multiplier: The backoff multiplier for the
     time between retries. Default value: 2 .
    :type retry_interval_backoff_multiplier: float
    """

    # Attribute name -> wire-format key and msrest type string.
    _attribute_map = {
        'timeout_seconds': {'key': 'timeoutSeconds', 'type': 'int'},
        'retry_attempts': {'key': 'retryAttempts', 'type': 'int'},
        'initial_retry_interval_seconds': {'key': 'initialRetryIntervalSeconds', 'type': 'int'},
        'maximum_retry_interval_seconds': {'key': 'maximumRetryIntervalSeconds', 'type': 'int'},
        'retry_interval_backoff_multiplier': {'key': 'retryIntervalBackoffMultiplier', 'type': 'float'},
    }

    def __init__(self, *, timeout_seconds: int=43200, retry_attempts: int=10, initial_retry_interval_seconds: int=1, maximum_retry_interval_seconds: int=120, retry_interval_backoff_multiplier: float=2, **kwargs) -> None:
        # Defaults mirror the service-side defaults (43200 s = 12 h timeout,
        # exponential backoff between 1 s and 120 s with multiplier 2).
        super(JobStepExecutionOptions, self).__init__(**kwargs)
        self.timeout_seconds = timeout_seconds
        self.retry_attempts = retry_attempts
        self.initial_retry_interval_seconds = initial_retry_interval_seconds
        self.maximum_retry_interval_seconds = maximum_retry_interval_seconds
        self.retry_interval_backoff_multiplier = retry_interval_backoff_multiplier
class JobStepOutput(Model):
    """The output configuration of a job step.

    All required parameters must be populated in order to send to Azure.

    :param type: The output destination type. Possible values include:
     'SqlDatabase'. Default value: "SqlDatabase" .
    :type type: str or ~azure.mgmt.sql.models.JobStepOutputType
    :param subscription_id: The output destination subscription id.
    :type subscription_id: str
    :param resource_group_name: The output destination resource group.
    :type resource_group_name: str
    :param server_name: Required. The output destination server name.
    :type server_name: str
    :param database_name: Required. The output destination database.
    :type database_name: str
    :param schema_name: The output destination schema. Default value: "dbo" .
    :type schema_name: str
    :param table_name: Required. The output destination table.
    :type table_name: str
    :param credential: Required. The resource ID of the credential to use to
     connect to the output destination.
    :type credential: str
    """

    # Destination server/database/table and the connection credential are
    # mandatory; subscription/resource-group/schema are optional.
    _validation = {
        'server_name': {'required': True},
        'database_name': {'required': True},
        'table_name': {'required': True},
        'credential': {'required': True},
    }

    # Attribute name -> wire-format key and msrest type string.
    _attribute_map = {
        'type': {'key': 'type', 'type': 'str'},
        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
        'resource_group_name': {'key': 'resourceGroupName', 'type': 'str'},
        'server_name': {'key': 'serverName', 'type': 'str'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
        'schema_name': {'key': 'schemaName', 'type': 'str'},
        'table_name': {'key': 'tableName', 'type': 'str'},
        'credential': {'key': 'credential', 'type': 'str'},
    }

    def __init__(self, *, server_name: str, database_name: str, table_name: str, credential: str, type="SqlDatabase", subscription_id: str=None, resource_group_name: str=None, schema_name: str="dbo", **kwargs) -> None:
        super(JobStepOutput, self).__init__(**kwargs)
        self.type = type
        self.subscription_id = subscription_id
        self.resource_group_name = resource_group_name
        self.server_name = server_name
        self.database_name = database_name
        self.schema_name = schema_name
        self.table_name = table_name
        self.credential = credential
class JobTarget(Model):
    """A job target, for example a specific database or a container of databases
    that is evaluated during job execution.

    All required parameters must be populated in order to send to Azure.

    :param membership_type: Whether the target is included or excluded from
     the group. Possible values include: 'Include', 'Exclude'. Default value:
     "Include" .
    :type membership_type: str or
     ~azure.mgmt.sql.models.JobTargetGroupMembershipType
    :param type: Required. The target type. Possible values include:
     'TargetGroup', 'SqlDatabase', 'SqlElasticPool', 'SqlShardMap', 'SqlServer'
    :type type: str or ~azure.mgmt.sql.models.JobTargetType
    :param server_name: The target server name.
    :type server_name: str
    :param database_name: The target database name.
    :type database_name: str
    :param elastic_pool_name: The target elastic pool name.
    :type elastic_pool_name: str
    :param shard_map_name: The target shard map.
    :type shard_map_name: str
    :param refresh_credential: The resource ID of the credential that is used
     during job execution to connect to the target and determine the list of
     databases inside the target.
    :type refresh_credential: str
    """

    # Only the target type is mandatory; which name fields are relevant
    # depends on that type (server vs database vs pool vs shard map).
    _validation = {
        'type': {'required': True},
    }

    # Attribute name -> wire-format key and msrest type string.
    _attribute_map = {
        'membership_type': {'key': 'membershipType', 'type': 'JobTargetGroupMembershipType'},
        'type': {'key': 'type', 'type': 'str'},
        'server_name': {'key': 'serverName', 'type': 'str'},
        'database_name': {'key': 'databaseName', 'type': 'str'},
        'elastic_pool_name': {'key': 'elasticPoolName', 'type': 'str'},
        'shard_map_name': {'key': 'shardMapName', 'type': 'str'},
        'refresh_credential': {'key': 'refreshCredential', 'type': 'str'},
    }

    def __init__(self, *, type, membership_type="Include", server_name: str=None, database_name: str=None, elastic_pool_name: str=None, shard_map_name: str=None, refresh_credential: str=None, **kwargs) -> None:
        super(JobTarget, self).__init__(**kwargs)
        self.membership_type = membership_type
        self.type = type
        self.server_name = server_name
        self.database_name = database_name
        self.elastic_pool_name = elastic_pool_name
        self.shard_map_name = shard_map_name
        self.refresh_credential = refresh_credential
class JobTargetGroup(ProxyResource):
    """A group of job targets.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param members: Required. Members of the target group.
    :type members: list[~azure.mgmt.sql.models.JobTarget]
    """

    # 'readonly' fields are server-populated; members must be supplied.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'members': {'required': True},
    }

    # Attribute name -> wire-format key ('properties.*' = nested under the ARM
    # resource properties envelope) and msrest type string.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'members': {'key': 'properties.members', 'type': '[JobTarget]'},
    }

    def __init__(self, *, members, **kwargs) -> None:
        super(JobTargetGroup, self).__init__(**kwargs)
        self.members = members
class JobVersion(ProxyResource):
    """A single version of a job.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        # No writable properties beyond what ProxyResource provides.
        super().__init__(**kwargs)
class LicenseTypeCapability(Model):
    """Describes support for a particular license type.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar name: License type identifier.
    :vartype name: str
    :ivar status: The status of the capability. Possible values include:
     'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: The reason for the capability not being available.
    :type reason: str
    """

    _validation = {
        'name': {'readonly': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'status': {'key': 'status', 'type': 'CapabilityStatus'},
        'reason': {'key': 'reason', 'type': 'str'},
    }

    def __init__(self, *, reason: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # name and status are server-populated; only reason is caller-settable.
        self.name = None
        self.status = None
        self.reason = reason
class LocationCapabilities(Model):
    """Capabilities available in a given Azure location.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar name: The location name.
    :vartype name: str
    :ivar supported_server_versions: The list of supported server versions.
    :vartype supported_server_versions:
     list[~azure.mgmt.sql.models.ServerVersionCapability]
    :ivar supported_managed_instance_versions: The list of supported managed
     instance versions.
    :vartype supported_managed_instance_versions:
     list[~azure.mgmt.sql.models.ManagedInstanceVersionCapability]
    :ivar status: The status of the capability. Possible values include:
     'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: The reason for the capability not being available.
    :type reason: str
    """

    _validation = {
        'name': {'readonly': True},
        'supported_server_versions': {'readonly': True},
        'supported_managed_instance_versions': {'readonly': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'supported_server_versions': {'key': 'supportedServerVersions', 'type': '[ServerVersionCapability]'},
        'supported_managed_instance_versions': {'key': 'supportedManagedInstanceVersions', 'type': '[ManagedInstanceVersionCapability]'},
        'status': {'key': 'status', 'type': 'CapabilityStatus'},
        'reason': {'key': 'reason', 'type': 'str'},
    }

    def __init__(self, *, reason: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # Everything except `reason` is populated by the service.
        self.name = None
        self.supported_server_versions = None
        self.supported_managed_instance_versions = None
        self.status = None
        self.reason = reason
class LogSizeCapability(Model):
    """The supported transaction log size.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar limit: The log size limit (see 'unit' for the units).
    :vartype limit: int
    :ivar unit: The units that the limit is expressed in. Possible values
     include: 'Megabytes', 'Gigabytes', 'Terabytes', 'Petabytes', 'Percent'
    :vartype unit: str or ~azure.mgmt.sql.models.LogSizeUnit
    """

    _validation = {
        'limit': {'readonly': True},
        'unit': {'readonly': True},
    }

    _attribute_map = {
        'limit': {'key': 'limit', 'type': 'int'},
        'unit': {'key': 'unit', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        # Both fields are read-only and filled in by the service.
        self.limit = None
        self.unit = None
class LongTermRetentionBackup(ProxyResource):
    """A backup kept under a long term retention policy.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar server_name: The server name that the backup database belong to.
    :vartype server_name: str
    :ivar server_create_time: The create time of the server.
    :vartype server_create_time: datetime
    :ivar database_name: The name of the database the backup belong to
    :vartype database_name: str
    :ivar database_deletion_time: The delete time of the database
    :vartype database_deletion_time: datetime
    :ivar backup_time: The time the backup was taken
    :vartype backup_time: datetime
    :ivar backup_expiration_time: The time the long term retention backup will
     expire.
    :vartype backup_expiration_time: datetime
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'server_name': {'readonly': True},
        'server_create_time': {'readonly': True},
        'database_name': {'readonly': True},
        'database_deletion_time': {'readonly': True},
        'backup_time': {'readonly': True},
        'backup_expiration_time': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'server_name': {'key': 'properties.serverName', 'type': 'str'},
        'server_create_time': {'key': 'properties.serverCreateTime', 'type': 'iso-8601'},
        'database_name': {'key': 'properties.databaseName', 'type': 'str'},
        'database_deletion_time': {'key': 'properties.databaseDeletionTime', 'type': 'iso-8601'},
        'backup_time': {'key': 'properties.backupTime', 'type': 'iso-8601'},
        'backup_expiration_time': {'key': 'properties.backupExpirationTime', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        # All properties are read-only and populated by the service.
        self.server_name = None
        self.server_create_time = None
        self.database_name = None
        self.database_deletion_time = None
        self.backup_time = None
        self.backup_expiration_time = None
class ManagedBackupShortTermRetentionPolicy(ProxyResource):
    """A short term backup retention policy for a managed database.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param retention_days: The backup retention period in days. This is how
     many days Point-in-Time Restore will be supported.
    :type retention_days: int
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'retention_days': {'key': 'properties.retentionDays', 'type': 'int'},
    }

    def __init__(self, *, retention_days: int = None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.retention_days = retention_days
class ManagedDatabase(TrackedResource):
    """A database hosted on an Azure SQL managed instance.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: Required. Resource location.
    :type location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param collation: Collation of the managed database.
    :type collation: str
    :ivar status: Status of the database. Possible values include: 'Online',
     'Offline', 'Shutdown', 'Creating', 'Inaccessible', 'Updating'
    :vartype status: str or ~azure.mgmt.sql.models.ManagedDatabaseStatus
    :ivar creation_date: Creation date of the database.
    :vartype creation_date: datetime
    :ivar earliest_restore_point: Earliest restore point in time for point in
     time restore.
    :vartype earliest_restore_point: datetime
    :param restore_point_in_time: Conditional. Required when createMode is
     PointInTimeRestore: the point in time (ISO8601 format) of the source
     database that will be restored to create the new database.
    :type restore_point_in_time: datetime
    :ivar default_secondary_location: Geo paired region.
    :vartype default_secondary_location: str
    :param catalog_collation: Collation of the metadata catalog. Possible
     values include: 'DATABASE_DEFAULT', 'SQL_Latin1_General_CP1_CI_AS'
    :type catalog_collation: str or
     ~azure.mgmt.sql.models.CatalogCollationType
    :param create_mode: Managed database create mode.
     PointInTimeRestore: restore a point-in-time backup of an existing
     database (requires SourceDatabaseName, SourceManagedInstanceName and
     PointInTime). RestoreExternalBackup: restore from external backup files
     (requires Collation, StorageContainerUri and StorageContainerSasToken).
     Recovery: restore a geo-replicated backup (requires
     RecoverableDatabaseId). Possible values include: 'Default',
     'RestoreExternalBackup', 'PointInTimeRestore', 'Recovery'
    :type create_mode: str or ~azure.mgmt.sql.models.ManagedDatabaseCreateMode
    :param storage_container_uri: Conditional. Required when createMode is
     RestoreExternalBackup: the uri of the storage container where backups
     for this restore are stored.
    :type storage_container_uri: str
    :param source_database_id: The resource identifier of the source database
     associated with create operation of this database.
    :type source_database_id: str
    :param restorable_dropped_database_id: The restorable dropped database
     resource id to restore when creating this database.
    :type restorable_dropped_database_id: str
    :param storage_container_sas_token: Conditional. Required when createMode
     is RestoreExternalBackup: the storage container sas token.
    :type storage_container_sas_token: str
    :ivar failover_group_id: Instance Failover Group resource identifier that
     this managed database belongs to.
    :vartype failover_group_id: str
    :param recoverable_database_id: The resource identifier of the recoverable
     database associated with create operation of this database.
    :type recoverable_database_id: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
        'status': {'readonly': True},
        'creation_date': {'readonly': True},
        'earliest_restore_point': {'readonly': True},
        'default_secondary_location': {'readonly': True},
        'failover_group_id': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'collation': {'key': 'properties.collation', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'str'},
        'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'},
        'earliest_restore_point': {'key': 'properties.earliestRestorePoint', 'type': 'iso-8601'},
        'restore_point_in_time': {'key': 'properties.restorePointInTime', 'type': 'iso-8601'},
        'default_secondary_location': {'key': 'properties.defaultSecondaryLocation', 'type': 'str'},
        'catalog_collation': {'key': 'properties.catalogCollation', 'type': 'str'},
        'create_mode': {'key': 'properties.createMode', 'type': 'str'},
        'storage_container_uri': {'key': 'properties.storageContainerUri', 'type': 'str'},
        'source_database_id': {'key': 'properties.sourceDatabaseId', 'type': 'str'},
        'restorable_dropped_database_id': {'key': 'properties.restorableDroppedDatabaseId', 'type': 'str'},
        'storage_container_sas_token': {'key': 'properties.storageContainerSasToken', 'type': 'str'},
        'failover_group_id': {'key': 'properties.failoverGroupId', 'type': 'str'},
        'recoverable_database_id': {'key': 'properties.recoverableDatabaseId', 'type': 'str'},
    }

    def __init__(self, *, location: str, tags=None, collation: str=None,
                 restore_point_in_time=None, catalog_collation=None,
                 create_mode=None, storage_container_uri: str=None,
                 source_database_id: str=None,
                 restorable_dropped_database_id: str=None,
                 storage_container_sas_token: str=None,
                 recoverable_database_id: str=None, **kwargs) -> None:
        super().__init__(location=location, tags=tags, **kwargs)
        self.collation = collation
        # Server-populated, read-only fields start out as None.
        self.status = None
        self.creation_date = None
        self.earliest_restore_point = None
        self.restore_point_in_time = restore_point_in_time
        self.default_secondary_location = None
        self.catalog_collation = catalog_collation
        self.create_mode = create_mode
        self.storage_container_uri = storage_container_uri
        self.source_database_id = source_database_id
        self.restorable_dropped_database_id = restorable_dropped_database_id
        self.storage_container_sas_token = storage_container_sas_token
        self.failover_group_id = None
        self.recoverable_database_id = recoverable_database_id
class ManagedDatabaseSecurityAlertPolicy(ProxyResource):
    """A security alert (Threat Detection) policy for a managed database.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param state: Required. Specifies the state of the policy, whether it is
     enabled or disabled or a policy has not been applied yet on the specific
     database. Possible values include: 'New', 'Enabled', 'Disabled'
    :type state: str or ~azure.mgmt.sql.models.SecurityAlertPolicyState
    :param disabled_alerts: Specifies an array of alerts that are disabled.
     Allowed values are: Sql_Injection, Sql_Injection_Vulnerability,
     Access_Anomaly, Data_Exfiltration, Unsafe_Action
    :type disabled_alerts: list[str]
    :param email_addresses: Specifies an array of e-mail addresses to which
     the alert is sent.
    :type email_addresses: list[str]
    :param email_account_admins: Specifies that the alert is sent to the
     account administrators.
    :type email_account_admins: bool
    :param storage_endpoint: Specifies the blob storage endpoint (e.g.
     https://MyAccount.blob.core.windows.net). This blob storage will hold
     all Threat Detection audit logs.
    :type storage_endpoint: str
    :param storage_account_access_key: Specifies the identifier key of the
     Threat Detection audit storage account.
    :type storage_account_access_key: str
    :param retention_days: Specifies the number of days to keep in the Threat
     Detection audit logs.
    :type retention_days: int
    :ivar creation_time: Specifies the UTC creation time of the policy.
    :vartype creation_time: datetime
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'state': {'required': True},
        'creation_time': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'SecurityAlertPolicyState'},
        'disabled_alerts': {'key': 'properties.disabledAlerts', 'type': '[str]'},
        'email_addresses': {'key': 'properties.emailAddresses', 'type': '[str]'},
        'email_account_admins': {'key': 'properties.emailAccountAdmins', 'type': 'bool'},
        'storage_endpoint': {'key': 'properties.storageEndpoint', 'type': 'str'},
        'storage_account_access_key': {'key': 'properties.storageAccountAccessKey', 'type': 'str'},
        'retention_days': {'key': 'properties.retentionDays', 'type': 'int'},
        'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
    }

    def __init__(self, *, state, disabled_alerts=None, email_addresses=None,
                 email_account_admins: bool=None, storage_endpoint: str=None,
                 storage_account_access_key: str=None,
                 retention_days: int=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.state = state
        self.disabled_alerts = disabled_alerts
        self.email_addresses = email_addresses
        self.email_account_admins = email_account_admins
        self.storage_endpoint = storage_endpoint
        self.storage_account_access_key = storage_account_access_key
        self.retention_days = retention_days
        # Set by the service when the policy is created.
        self.creation_time = None
class ManagedDatabaseUpdate(Model):
    """An update request for a managed database.

    Mirrors the settable/readable properties of ``ManagedDatabase`` but as a
    flat (non-tracked) model with optional tags, as used for PATCH requests.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param collation: Collation of the managed database.
    :type collation: str
    :ivar status: Status of the database. Possible values include: 'Online',
     'Offline', 'Shutdown', 'Creating', 'Inaccessible', 'Updating'
    :vartype status: str or ~azure.mgmt.sql.models.ManagedDatabaseStatus
    :ivar creation_date: Creation date of the database.
    :vartype creation_date: datetime
    :ivar earliest_restore_point: Earliest restore point in time for point in
     time restore.
    :vartype earliest_restore_point: datetime
    :param restore_point_in_time: Conditional. Required when createMode is
     PointInTimeRestore: the point in time (ISO8601 format) of the source
     database that will be restored to create the new database.
    :type restore_point_in_time: datetime
    :ivar default_secondary_location: Geo paired region.
    :vartype default_secondary_location: str
    :param catalog_collation: Collation of the metadata catalog. Possible
     values include: 'DATABASE_DEFAULT', 'SQL_Latin1_General_CP1_CI_AS'
    :type catalog_collation: str or
     ~azure.mgmt.sql.models.CatalogCollationType
    :param create_mode: Managed database create mode.
     PointInTimeRestore: restore a point-in-time backup of an existing
     database (requires SourceDatabaseName, SourceManagedInstanceName and
     PointInTime). RestoreExternalBackup: restore from external backup files
     (requires Collation, StorageContainerUri and StorageContainerSasToken).
     Recovery: restore a geo-replicated backup (requires
     RecoverableDatabaseId). Possible values include: 'Default',
     'RestoreExternalBackup', 'PointInTimeRestore', 'Recovery'
    :type create_mode: str or ~azure.mgmt.sql.models.ManagedDatabaseCreateMode
    :param storage_container_uri: Conditional. Required when createMode is
     RestoreExternalBackup: the uri of the storage container where backups
     for this restore are stored.
    :type storage_container_uri: str
    :param source_database_id: The resource identifier of the source database
     associated with create operation of this database.
    :type source_database_id: str
    :param restorable_dropped_database_id: The restorable dropped database
     resource id to restore when creating this database.
    :type restorable_dropped_database_id: str
    :param storage_container_sas_token: Conditional. Required when createMode
     is RestoreExternalBackup: the storage container sas token.
    :type storage_container_sas_token: str
    :ivar failover_group_id: Instance Failover Group resource identifier that
     this managed database belongs to.
    :vartype failover_group_id: str
    :param recoverable_database_id: The resource identifier of the recoverable
     database associated with create operation of this database.
    :type recoverable_database_id: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    """

    _validation = {
        'status': {'readonly': True},
        'creation_date': {'readonly': True},
        'earliest_restore_point': {'readonly': True},
        'default_secondary_location': {'readonly': True},
        'failover_group_id': {'readonly': True},
    }

    _attribute_map = {
        'collation': {'key': 'properties.collation', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'str'},
        'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'},
        'earliest_restore_point': {'key': 'properties.earliestRestorePoint', 'type': 'iso-8601'},
        'restore_point_in_time': {'key': 'properties.restorePointInTime', 'type': 'iso-8601'},
        'default_secondary_location': {'key': 'properties.defaultSecondaryLocation', 'type': 'str'},
        'catalog_collation': {'key': 'properties.catalogCollation', 'type': 'str'},
        'create_mode': {'key': 'properties.createMode', 'type': 'str'},
        'storage_container_uri': {'key': 'properties.storageContainerUri', 'type': 'str'},
        'source_database_id': {'key': 'properties.sourceDatabaseId', 'type': 'str'},
        'restorable_dropped_database_id': {'key': 'properties.restorableDroppedDatabaseId', 'type': 'str'},
        'storage_container_sas_token': {'key': 'properties.storageContainerSasToken', 'type': 'str'},
        'failover_group_id': {'key': 'properties.failoverGroupId', 'type': 'str'},
        'recoverable_database_id': {'key': 'properties.recoverableDatabaseId', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, *, collation: str=None, restore_point_in_time=None,
                 catalog_collation=None, create_mode=None,
                 storage_container_uri: str=None, source_database_id: str=None,
                 restorable_dropped_database_id: str=None,
                 storage_container_sas_token: str=None,
                 recoverable_database_id: str=None, tags=None,
                 **kwargs) -> None:
        super().__init__(**kwargs)
        self.collation = collation
        # Server-populated, read-only fields start out as None.
        self.status = None
        self.creation_date = None
        self.earliest_restore_point = None
        self.restore_point_in_time = restore_point_in_time
        self.default_secondary_location = None
        self.catalog_collation = catalog_collation
        self.create_mode = create_mode
        self.storage_container_uri = storage_container_uri
        self.source_database_id = source_database_id
        self.restorable_dropped_database_id = restorable_dropped_database_id
        self.storage_container_sas_token = storage_container_sas_token
        self.failover_group_id = None
        self.recoverable_database_id = recoverable_database_id
        self.tags = tags
class ManagedInstance(TrackedResource):
    """An Azure SQL managed instance.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: Required. Resource location.
    :type location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param identity: The Azure Active Directory identity of the managed
     instance.
    :type identity: ~azure.mgmt.sql.models.ResourceIdentity
    :param sku: Managed instance SKU. Allowed values for sku.name: GP_Gen4,
     GP_Gen5, BC_Gen4, BC_Gen5
    :type sku: ~azure.mgmt.sql.models.Sku
    :param managed_instance_create_mode: Specifies the mode of database
     creation. Default: Regular instance creation. Restore: creates an
     instance by restoring a set of backups to a specific point in time
     (RestorePointInTime and SourceManagedInstanceId must be specified).
     Possible values include: 'Default', 'PointInTimeRestore'
    :type managed_instance_create_mode: str or
     ~azure.mgmt.sql.models.ManagedServerCreateMode
    :ivar fully_qualified_domain_name: The fully qualified domain name of the
     managed instance.
    :vartype fully_qualified_domain_name: str
    :param administrator_login: Administrator username for the managed
     instance. Can only be specified when the managed instance is being
     created (and is required for creation).
    :type administrator_login: str
    :param administrator_login_password: The administrator login password
     (required for managed instance creation).
    :type administrator_login_password: str
    :param subnet_id: Subnet resource ID for the managed instance.
    :type subnet_id: str
    :ivar state: The state of the managed instance.
    :vartype state: str
    :param license_type: The license type. Possible values are
     'LicenseIncluded' (regular price inclusive of a new SQL license) and
     'BasePrice' (discounted AHB price for bringing your own SQL licenses).
     Possible values include: 'LicenseIncluded', 'BasePrice'
    :type license_type: str or
     ~azure.mgmt.sql.models.ManagedInstanceLicenseType
    :param v_cores: The number of vCores. Allowed values: 8, 16, 24, 32, 40,
     64, 80.
    :type v_cores: int
    :param storage_size_in_gb: Storage size in GB. Minimum value: 32. Maximum
     value: 8192. Increments of 32 GB allowed only.
    :type storage_size_in_gb: int
    :param collation: Collation of the managed instance.
    :type collation: str
    :ivar dns_zone: The Dns Zone that the managed instance is in.
    :vartype dns_zone: str
    :param dns_zone_partner: The resource id of another managed instance
     whose DNS zone this managed instance will share after creation.
    :type dns_zone_partner: str
    :param public_data_endpoint_enabled: Whether or not the public data
     endpoint is enabled.
    :type public_data_endpoint_enabled: bool
    :param source_managed_instance_id: The resource identifier of the source
     managed instance associated with create operation of this instance.
    :type source_managed_instance_id: str
    :param restore_point_in_time: Specifies the point in time (ISO8601
     format) of the source database that will be restored to create the new
     database.
    :type restore_point_in_time: datetime
    :param proxy_override: Connection type used for connecting to the
     instance. Possible values include: 'Proxy', 'Redirect', 'Default'
    :type proxy_override: str or
     ~azure.mgmt.sql.models.ManagedInstanceProxyOverride
    :param timezone_id: Id of the timezone. Allowed values are timezones
     supported by Windows, listed in the registry under
     HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Time
     Zones. They can also be obtained via SQL Server (SELECT name AS
     timezone_id FROM sys.time_zone_info) or PowerShell
     ([System.TimeZoneInfo]::GetSystemTimeZones()). An example of a valid
     timezone id is "Pacific Standard Time" or "W. Europe Standard Time".
    :type timezone_id: str
    :param instance_pool_id: The Id of the instance pool this managed server
     belongs to.
    :type instance_pool_id: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
        'fully_qualified_domain_name': {'readonly': True},
        'state': {'readonly': True},
        'dns_zone': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
        'sku': {'key': 'sku', 'type': 'Sku'},
        'managed_instance_create_mode': {'key': 'properties.managedInstanceCreateMode', 'type': 'str'},
        'fully_qualified_domain_name': {'key': 'properties.fullyQualifiedDomainName', 'type': 'str'},
        'administrator_login': {'key': 'properties.administratorLogin', 'type': 'str'},
        'administrator_login_password': {'key': 'properties.administratorLoginPassword', 'type': 'str'},
        'subnet_id': {'key': 'properties.subnetId', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'str'},
        'license_type': {'key': 'properties.licenseType', 'type': 'str'},
        'v_cores': {'key': 'properties.vCores', 'type': 'int'},
        'storage_size_in_gb': {'key': 'properties.storageSizeInGB', 'type': 'int'},
        'collation': {'key': 'properties.collation', 'type': 'str'},
        'dns_zone': {'key': 'properties.dnsZone', 'type': 'str'},
        'dns_zone_partner': {'key': 'properties.dnsZonePartner', 'type': 'str'},
        'public_data_endpoint_enabled': {'key': 'properties.publicDataEndpointEnabled', 'type': 'bool'},
        'source_managed_instance_id': {'key': 'properties.sourceManagedInstanceId', 'type': 'str'},
        'restore_point_in_time': {'key': 'properties.restorePointInTime', 'type': 'iso-8601'},
        'proxy_override': {'key': 'properties.proxyOverride', 'type': 'str'},
        'timezone_id': {'key': 'properties.timezoneId', 'type': 'str'},
        'instance_pool_id': {'key': 'properties.instancePoolId', 'type': 'str'},
    }

    def __init__(self, *, location: str, tags=None, identity=None, sku=None,
                 managed_instance_create_mode=None,
                 administrator_login: str=None,
                 administrator_login_password: str=None, subnet_id: str=None,
                 license_type=None, v_cores: int=None,
                 storage_size_in_gb: int=None, collation: str=None,
                 dns_zone_partner: str=None,
                 public_data_endpoint_enabled: bool=None,
                 source_managed_instance_id: str=None,
                 restore_point_in_time=None, proxy_override=None,
                 timezone_id: str=None, instance_pool_id: str=None,
                 **kwargs) -> None:
        super().__init__(location=location, tags=tags, **kwargs)
        self.identity = identity
        self.sku = sku
        self.managed_instance_create_mode = managed_instance_create_mode
        # Read-only, server-populated fields start out as None.
        self.fully_qualified_domain_name = None
        self.administrator_login = administrator_login
        self.administrator_login_password = administrator_login_password
        self.subnet_id = subnet_id
        self.state = None
        self.license_type = license_type
        self.v_cores = v_cores
        self.storage_size_in_gb = storage_size_in_gb
        self.collation = collation
        self.dns_zone = None
        self.dns_zone_partner = dns_zone_partner
        self.public_data_endpoint_enabled = public_data_endpoint_enabled
        self.source_managed_instance_id = source_managed_instance_id
        self.restore_point_in_time = restore_point_in_time
        self.proxy_override = proxy_override
        self.timezone_id = timezone_id
        self.instance_pool_id = instance_pool_id
class ManagedInstanceAdministrator(ProxyResource):
    """An Azure Active Directory administrator for a managed instance.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar administrator_type: Required. Type of the managed instance
     administrator. Default value: "ActiveDirectory" .
    :vartype administrator_type: str
    :param login: Required. Login name of the managed instance administrator.
    :type login: str
    :param sid: Required. SID (object ID) of the managed instance
     administrator.
    :type sid: str
    :param tenant_id: Tenant ID of the managed instance administrator.
    :type tenant_id: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'administrator_type': {'required': True, 'constant': True},
        'login': {'required': True},
        'sid': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'administrator_type': {'key': 'properties.administratorType', 'type': 'str'},
        'login': {'key': 'properties.login', 'type': 'str'},
        'sid': {'key': 'properties.sid', 'type': 'str'},
        'tenant_id': {'key': 'properties.tenantId', 'type': 'str'},
    }

    # Constant: the only administrator type the service accepts.
    administrator_type = "ActiveDirectory"

    def __init__(self, *, login: str, sid: str, tenant_id: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.login = login
        self.sid = sid
        self.tenant_id = tenant_id
class ManagedInstanceEditionCapability(Model):
    """Describes a supported managed instance edition.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar name: The managed server version name.
    :vartype name: str
    :ivar supported_families: The supported families.
    :vartype supported_families:
     list[~azure.mgmt.sql.models.ManagedInstanceFamilyCapability]
    :ivar status: The status of the capability. Possible values include:
     'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: The reason for the capability not being available.
    :type reason: str
    """

    _validation = {
        'name': {'readonly': True},
        'supported_families': {'readonly': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'supported_families': {'key': 'supportedFamilies', 'type': '[ManagedInstanceFamilyCapability]'},
        'status': {'key': 'status', 'type': 'CapabilityStatus'},
        'reason': {'key': 'reason', 'type': 'str'},
    }

    def __init__(self, *, reason: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # name, supported_families and status are server-populated.
        self.name = None
        self.supported_families = None
        self.status = None
        self.reason = reason
class ManagedInstanceEncryptionProtector(ProxyResource):
    """The encryption protector of a managed instance.

    Read-only (ivar) attributes are populated by the service and are
    ignored when included in a request. All required parameters must be
    supplied in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar kind: Encryption protector kind; metadata consumed by the Azure
     portal experience.
    :vartype kind: str
    :param server_key_name: Name of the managed instance key.
    :type server_key_name: str
    :param server_key_type: Required. The encryption protector type, e.g.
     'ServiceManaged' or 'AzureKeyVault'. Possible values include:
     'ServiceManaged', 'AzureKeyVault'
    :type server_key_type: str or ~azure.mgmt.sql.models.ServerKeyType
    :ivar uri: URI of the server key.
    :vartype uri: str
    :ivar thumbprint: Thumbprint of the server key.
    :vartype thumbprint: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'kind': {'readonly': True},
        'server_key_type': {'required': True},
        'uri': {'readonly': True},
        'thumbprint': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'server_key_name': {'key': 'properties.serverKeyName', 'type': 'str'},
        'server_key_type': {'key': 'properties.serverKeyType', 'type': 'str'},
        'uri': {'key': 'properties.uri', 'type': 'str'},
        'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
    }

    def __init__(self, *, server_key_type, server_key_name: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # `kind`, `uri` and `thumbprint` are read-only and filled by the service.
        self.kind = None
        self.server_key_name = server_key_name
        self.server_key_type = server_key_type
        self.uri = None
        self.thumbprint = None
class ManagedInstanceFamilyCapability(Model):
    """Describes one hardware family supported by managed servers.

    Read-only (ivar) attributes are populated by the service and are
    ignored when included in a request.

    :ivar name: Name of the hardware family.
    :vartype name: str
    :ivar sku: SKU name.
    :vartype sku: str
    :ivar supported_license_types: License types available for this family.
    :vartype supported_license_types:
     list[~azure.mgmt.sql.models.LicenseTypeCapability]
    :ivar supported_vcores_values: Virtual-core counts available for this
     family.
    :vartype supported_vcores_values:
     list[~azure.mgmt.sql.models.ManagedInstanceVcoresCapability]
    :ivar included_max_size: Storage size included with the family.
    :vartype included_max_size: ~azure.mgmt.sql.models.MaxSizeCapability
    :ivar supported_storage_sizes: Supported storage size ranges.
    :vartype supported_storage_sizes:
     list[~azure.mgmt.sql.models.MaxSizeRangeCapability]
    :ivar status: Availability status of the capability. Possible values
     include: 'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: Explanation of why the capability is not available.
    :type reason: str
    """

    _validation = {
        'name': {'readonly': True},
        'sku': {'readonly': True},
        'supported_license_types': {'readonly': True},
        'supported_vcores_values': {'readonly': True},
        'included_max_size': {'readonly': True},
        'supported_storage_sizes': {'readonly': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'sku': {'key': 'sku', 'type': 'str'},
        'supported_license_types': {'key': 'supportedLicenseTypes', 'type': '[LicenseTypeCapability]'},
        'supported_vcores_values': {'key': 'supportedVcoresValues', 'type': '[ManagedInstanceVcoresCapability]'},
        'included_max_size': {'key': 'includedMaxSize', 'type': 'MaxSizeCapability'},
        'supported_storage_sizes': {'key': 'supportedStorageSizes', 'type': '[MaxSizeRangeCapability]'},
        'status': {'key': 'status', 'type': 'CapabilityStatus'},
        'reason': {'key': 'reason', 'type': 'str'},
    }

    def __init__(self, *, reason: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # All fields but `reason` are read-only and filled by the service.
        self.name = None
        self.sku = None
        self.supported_license_types = None
        self.supported_vcores_values = None
        self.included_max_size = None
        self.supported_storage_sizes = None
        self.status = None
        self.reason = reason
class ManagedInstanceKey(ProxyResource):
    """A key belonging to a managed instance.

    Read-only (ivar) attributes are populated by the service and are
    ignored when included in a request. All required parameters must be
    supplied in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar kind: Encryption protector kind; metadata consumed by the Azure
     portal experience.
    :vartype kind: str
    :param server_key_type: Required. The key type, e.g. 'ServiceManaged'
     or 'AzureKeyVault'. Possible values include: 'ServiceManaged',
     'AzureKeyVault'
    :type server_key_type: str or ~azure.mgmt.sql.models.ServerKeyType
    :param uri: URI of the key. Required when the key type is
     AzureKeyVault.
    :type uri: str
    :ivar thumbprint: Thumbprint of the key.
    :vartype thumbprint: str
    :ivar creation_date: When the key was created.
    :vartype creation_date: datetime
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'kind': {'readonly': True},
        'server_key_type': {'required': True},
        'thumbprint': {'readonly': True},
        'creation_date': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'server_key_type': {'key': 'properties.serverKeyType', 'type': 'str'},
        'uri': {'key': 'properties.uri', 'type': 'str'},
        'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
        'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'},
    }

    def __init__(self, *, server_key_type, uri: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # `kind`, `thumbprint` and `creation_date` are filled by the service.
        self.kind = None
        self.server_key_type = server_key_type
        self.uri = uri
        self.thumbprint = None
        self.creation_date = None
class ManagedInstancePairInfo(Model):
    """A primary/partner pair of managed instances in a failover group.

    :param primary_managed_instance_id: Resource id of the primary managed
     instance in the pair.
    :type primary_managed_instance_id: str
    :param partner_managed_instance_id: Resource id of the partner managed
     instance in the pair.
    :type partner_managed_instance_id: str
    """

    _attribute_map = {
        'primary_managed_instance_id': {'key': 'primaryManagedInstanceId', 'type': 'str'},
        'partner_managed_instance_id': {'key': 'partnerManagedInstanceId', 'type': 'str'},
    }

    def __init__(self, *, primary_managed_instance_id: str = None, partner_managed_instance_id: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.primary_managed_instance_id = primary_managed_instance_id
        self.partner_managed_instance_id = partner_managed_instance_id
class ManagedInstanceUpdate(Model):
    """An update request for an Azure SQL Database managed instance.

    Read-only (ivar) attributes are populated by the service and are
    ignored when included in a request.

    :param sku: Managed instance sku
    :type sku: ~azure.mgmt.sql.models.Sku
    :param managed_instance_create_mode: Mode of database creation.
     Default: regular instance creation. Restore: creates an instance by
     restoring a set of backups to a specific point in time;
     RestorePointInTime and SourceManagedInstanceId must be specified.
     Possible values include: 'Default', 'PointInTimeRestore'
    :type managed_instance_create_mode: str or
     ~azure.mgmt.sql.models.ManagedServerCreateMode
    :ivar fully_qualified_domain_name: Fully qualified domain name of the
     managed instance.
    :vartype fully_qualified_domain_name: str
    :param administrator_login: Administrator username for the managed
     instance. Can only be specified (and is required) at creation time.
    :type administrator_login: str
    :param administrator_login_password: Administrator login password
     (required when creating a managed instance).
    :type administrator_login_password: str
    :param subnet_id: Subnet resource ID for the managed instance.
    :type subnet_id: str
    :ivar state: State of the managed instance.
    :vartype state: str
    :param license_type: License type. 'LicenseIncluded' is the regular
     price inclusive of a new SQL license; 'BasePrice' is the discounted
     AHB price for bringing your own SQL licenses. Possible values
     include: 'LicenseIncluded', 'BasePrice'
    :type license_type: str or
     ~azure.mgmt.sql.models.ManagedInstanceLicenseType
    :param v_cores: Number of vCores. Allowed values: 8, 16, 24, 32, 40,
     64, 80.
    :type v_cores: int
    :param storage_size_in_gb: Storage size in GB. Minimum 32, maximum
     8192; only increments of 32 GB are allowed.
    :type storage_size_in_gb: int
    :param collation: Collation of the managed instance.
    :type collation: str
    :ivar dns_zone: Dns Zone the managed instance is in.
    :vartype dns_zone: str
    :param dns_zone_partner: Resource id of another managed instance whose
     DNS zone this managed instance will share after creation.
    :type dns_zone_partner: str
    :param public_data_endpoint_enabled: Whether the public data endpoint
     is enabled.
    :type public_data_endpoint_enabled: bool
    :param source_managed_instance_id: Resource identifier of the source
     managed instance associated with this instance's create operation.
    :type source_managed_instance_id: str
    :param restore_point_in_time: Point in time (ISO8601 format) of the
     source database that will be restored to create the new database.
    :type restore_point_in_time: datetime
    :param proxy_override: Connection type used for connecting to the
     instance. Possible values include: 'Proxy', 'Redirect', 'Default'
    :type proxy_override: str or
     ~azure.mgmt.sql.models.ManagedInstanceProxyOverride
    :param timezone_id: Id of the timezone. Allowed values are timezones
     supported by Windows; Windows keeps supported timezone details,
     including the id, in registry under
     KEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Time
     Zones. The registry values are also available via SQL Server
     (SELECT name AS timezone_id FROM sys.time_zone_info) or PowerShell
     ([System.TimeZoneInfo]::GetSystemTimeZones()). Examples of valid ids
     are "Pacific Standard Time" and "W. Europe Standard Time".
    :type timezone_id: str
    :param instance_pool_id: Id of the instance pool this managed server
     belongs to.
    :type instance_pool_id: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    """

    _validation = {
        'fully_qualified_domain_name': {'readonly': True},
        'state': {'readonly': True},
        'dns_zone': {'readonly': True},
    }

    _attribute_map = {
        'sku': {'key': 'sku', 'type': 'Sku'},
        'managed_instance_create_mode': {'key': 'properties.managedInstanceCreateMode', 'type': 'str'},
        'fully_qualified_domain_name': {'key': 'properties.fullyQualifiedDomainName', 'type': 'str'},
        'administrator_login': {'key': 'properties.administratorLogin', 'type': 'str'},
        'administrator_login_password': {'key': 'properties.administratorLoginPassword', 'type': 'str'},
        'subnet_id': {'key': 'properties.subnetId', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'str'},
        'license_type': {'key': 'properties.licenseType', 'type': 'str'},
        'v_cores': {'key': 'properties.vCores', 'type': 'int'},
        'storage_size_in_gb': {'key': 'properties.storageSizeInGB', 'type': 'int'},
        'collation': {'key': 'properties.collation', 'type': 'str'},
        'dns_zone': {'key': 'properties.dnsZone', 'type': 'str'},
        'dns_zone_partner': {'key': 'properties.dnsZonePartner', 'type': 'str'},
        'public_data_endpoint_enabled': {'key': 'properties.publicDataEndpointEnabled', 'type': 'bool'},
        'source_managed_instance_id': {'key': 'properties.sourceManagedInstanceId', 'type': 'str'},
        'restore_point_in_time': {'key': 'properties.restorePointInTime', 'type': 'iso-8601'},
        'proxy_override': {'key': 'properties.proxyOverride', 'type': 'str'},
        'timezone_id': {'key': 'properties.timezoneId', 'type': 'str'},
        'instance_pool_id': {'key': 'properties.instancePoolId', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, *, sku=None, managed_instance_create_mode=None, administrator_login: str = None, administrator_login_password: str = None, subnet_id: str = None, license_type=None, v_cores: int = None, storage_size_in_gb: int = None, collation: str = None, dns_zone_partner: str = None, public_data_endpoint_enabled: bool = None, source_managed_instance_id: str = None, restore_point_in_time=None, proxy_override=None, timezone_id: str = None, instance_pool_id: str = None, tags=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.sku = sku
        self.managed_instance_create_mode = managed_instance_create_mode
        # Read-only fields (filled by the service).
        self.fully_qualified_domain_name = None
        self.administrator_login = administrator_login
        self.administrator_login_password = administrator_login_password
        self.subnet_id = subnet_id
        self.state = None
        self.license_type = license_type
        self.v_cores = v_cores
        self.storage_size_in_gb = storage_size_in_gb
        self.collation = collation
        self.dns_zone = None
        self.dns_zone_partner = dns_zone_partner
        self.public_data_endpoint_enabled = public_data_endpoint_enabled
        self.source_managed_instance_id = source_managed_instance_id
        self.restore_point_in_time = restore_point_in_time
        self.proxy_override = proxy_override
        self.timezone_id = timezone_id
        self.instance_pool_id = instance_pool_id
        self.tags = tags
class ManagedInstanceVcoresCapability(Model):
    """Describes a supported virtual-core count for a managed instance.

    Read-only (ivar) attributes are populated by the service and are
    ignored when included in a request.

    :ivar name: Identifier of the virtual-cores option.
    :vartype name: str
    :ivar value: Number of virtual cores.
    :vartype value: int
    :ivar status: Availability status of the capability. Possible values
     include: 'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: Explanation of why the capability is not available.
    :type reason: str
    """

    _validation = {
        'name': {'readonly': True},
        'value': {'readonly': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'value': {'key': 'value', 'type': 'int'},
        'status': {'key': 'status', 'type': 'CapabilityStatus'},
        'reason': {'key': 'reason', 'type': 'str'},
    }

    def __init__(self, *, reason: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # All fields but `reason` are read-only and filled by the service.
        self.name = None
        self.value = None
        self.status = None
        self.reason = reason
class ManagedInstanceVersionCapability(Model):
    """Describes a supported managed instance server version.

    Read-only (ivar) attributes are populated by the service and are
    ignored when included in a request.

    :ivar name: Name of the server version.
    :vartype name: str
    :ivar supported_editions: Managed instance editions supported by this
     version.
    :vartype supported_editions:
     list[~azure.mgmt.sql.models.ManagedInstanceEditionCapability]
    :ivar status: Availability status of the capability. Possible values
     include: 'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: Explanation of why the capability is not available.
    :type reason: str
    """

    _validation = {
        'name': {'readonly': True},
        'supported_editions': {'readonly': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'supported_editions': {'key': 'supportedEditions', 'type': '[ManagedInstanceEditionCapability]'},
        'status': {'key': 'status', 'type': 'CapabilityStatus'},
        'reason': {'key': 'reason', 'type': 'str'},
    }

    def __init__(self, *, reason: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # All fields but `reason` are read-only and filled by the service.
        self.name = None
        self.supported_editions = None
        self.status = None
        self.reason = reason
class ManagedInstanceVulnerabilityAssessment(ProxyResource):
    """Vulnerability assessment settings for a managed instance.

    Read-only (ivar) attributes are populated by the service and are
    ignored when included in a request. All required parameters must be
    supplied in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param storage_container_path: Required. Blob storage container path
     that holds the scan results (e.g.
     https://myStorage.blob.core.windows.net/VaScans/).
    :type storage_container_path: str
    :param storage_container_sas_key: Shared access signature (SAS Key)
     with write access to the blob container specified in
     'storageContainerPath'. Required when 'storageAccountAccessKey' is
     not specified.
    :type storage_container_sas_key: str
    :param storage_account_access_key: Identifier key of the storage
     account for vulnerability assessment scan results. Required when
     'StorageContainerSasKey' is not specified.
    :type storage_account_access_key: str
    :param recurring_scans: The recurring scans settings
    :type recurring_scans:
     ~azure.mgmt.sql.models.VulnerabilityAssessmentRecurringScansProperties
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'storage_container_path': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'storage_container_path': {'key': 'properties.storageContainerPath', 'type': 'str'},
        'storage_container_sas_key': {'key': 'properties.storageContainerSasKey', 'type': 'str'},
        'storage_account_access_key': {'key': 'properties.storageAccountAccessKey', 'type': 'str'},
        'recurring_scans': {'key': 'properties.recurringScans', 'type': 'VulnerabilityAssessmentRecurringScansProperties'},
    }

    def __init__(self, *, storage_container_path: str, storage_container_sas_key: str = None, storage_account_access_key: str = None, recurring_scans=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.storage_container_path = storage_container_path
        self.storage_container_sas_key = storage_container_sas_key
        self.storage_account_access_key = storage_account_access_key
        self.recurring_scans = recurring_scans
class ManagedServerSecurityAlertPolicy(ProxyResource):
    """A security alert policy for a managed server.

    Read-only (ivar) attributes are populated by the service and are
    ignored when included in a request. All required parameters must be
    supplied in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param state: Required. State of the policy: enabled, disabled, or not
     yet applied on the specific database. Possible values include: 'New',
     'Enabled', 'Disabled'
    :type state: str or ~azure.mgmt.sql.models.SecurityAlertPolicyState
    :param disabled_alerts: Array of alerts that are disabled. Allowed
     values are: Sql_Injection, Sql_Injection_Vulnerability,
     Access_Anomaly, Data_Exfiltration, Unsafe_Action
    :type disabled_alerts: list[str]
    :param email_addresses: Array of e-mail addresses the alert is sent
     to.
    :type email_addresses: list[str]
    :param email_account_admins: Whether the alert is also sent to the
     account administrators.
    :type email_account_admins: bool
    :param storage_endpoint: Blob storage endpoint (e.g.
     https://MyAccount.blob.core.windows.net) that will hold all Threat
     Detection audit logs.
    :type storage_endpoint: str
    :param storage_account_access_key: Identifier key of the Threat
     Detection audit storage account.
    :type storage_account_access_key: str
    :param retention_days: Number of days to keep in the Threat Detection
     audit logs.
    :type retention_days: int
    :ivar creation_time: UTC creation time of the policy.
    :vartype creation_time: datetime
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'state': {'required': True},
        'creation_time': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'SecurityAlertPolicyState'},
        'disabled_alerts': {'key': 'properties.disabledAlerts', 'type': '[str]'},
        'email_addresses': {'key': 'properties.emailAddresses', 'type': '[str]'},
        'email_account_admins': {'key': 'properties.emailAccountAdmins', 'type': 'bool'},
        'storage_endpoint': {'key': 'properties.storageEndpoint', 'type': 'str'},
        'storage_account_access_key': {'key': 'properties.storageAccountAccessKey', 'type': 'str'},
        'retention_days': {'key': 'properties.retentionDays', 'type': 'int'},
        'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
    }

    def __init__(self, *, state, disabled_alerts=None, email_addresses=None, email_account_admins: bool = None, storage_endpoint: str = None, storage_account_access_key: str = None, retention_days: int = None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.state = state
        self.disabled_alerts = disabled_alerts
        self.email_addresses = email_addresses
        self.email_account_admins = email_account_admins
        self.storage_endpoint = storage_endpoint
        self.storage_account_access_key = storage_account_access_key
        self.retention_days = retention_days
        # Filled by the service.
        self.creation_time = None
class MaxSizeCapability(Model):
    """A maximum-size capability value.

    All attributes are populated by the service and are ignored when
    included in a request.

    :ivar limit: The maximum size limit, expressed in the units given by
     'unit'.
    :vartype limit: int
    :ivar unit: Units the limit is expressed in. Possible values include:
     'Megabytes', 'Gigabytes', 'Terabytes', 'Petabytes'
    :vartype unit: str or ~azure.mgmt.sql.models.MaxSizeUnit
    """

    _validation = {
        'limit': {'readonly': True},
        'unit': {'readonly': True},
    }

    _attribute_map = {
        'limit': {'key': 'limit', 'type': 'int'},
        'unit': {'key': 'unit', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        self.limit = None
        self.unit = None
class MaxSizeRangeCapability(Model):
    """A supported range of maximum sizes.

    Read-only (ivar) attributes are populated by the service and are
    ignored when included in a request.

    :ivar min_value: Minimum value of the range.
    :vartype min_value: ~azure.mgmt.sql.models.MaxSizeCapability
    :ivar max_value: Maximum value of the range.
    :vartype max_value: ~azure.mgmt.sql.models.MaxSizeCapability
    :ivar scale_size: Scale/step size for discrete values between the
     minimum and maximum values.
    :vartype scale_size: ~azure.mgmt.sql.models.MaxSizeCapability
    :ivar log_size: Size of the transaction log.
    :vartype log_size: ~azure.mgmt.sql.models.LogSizeCapability
    :ivar status: Availability status of the capability. Possible values
     include: 'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: Explanation of why the capability is not available.
    :type reason: str
    """

    _validation = {
        'min_value': {'readonly': True},
        'max_value': {'readonly': True},
        'scale_size': {'readonly': True},
        'log_size': {'readonly': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'min_value': {'key': 'minValue', 'type': 'MaxSizeCapability'},
        'max_value': {'key': 'maxValue', 'type': 'MaxSizeCapability'},
        'scale_size': {'key': 'scaleSize', 'type': 'MaxSizeCapability'},
        'log_size': {'key': 'logSize', 'type': 'LogSizeCapability'},
        'status': {'key': 'status', 'type': 'CapabilityStatus'},
        'reason': {'key': 'reason', 'type': 'str'},
    }

    def __init__(self, *, reason: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        # All fields but `reason` are read-only and filled by the service.
        self.min_value = None
        self.max_value = None
        self.scale_size = None
        self.log_size = None
        self.status = None
        self.reason = reason
class Metric(Model):
    """Database metrics.

    All attributes are populated by the service and are ignored when
    included in a request.

    :ivar start_time: Start time for the metric (ISO-8601 format).
    :vartype start_time: datetime
    :ivar end_time: End time for the metric (ISO-8601 format).
    :vartype end_time: datetime
    :ivar time_grain: Time step used to summarize the metric values.
    :vartype time_grain: str
    :ivar unit: Unit of the metric. Possible values include: 'count',
     'bytes', 'seconds', 'percent', 'countPerSecond', 'bytesPerSecond'
    :vartype unit: str or ~azure.mgmt.sql.models.UnitType
    :ivar name: Name information for the metric.
    :vartype name: ~azure.mgmt.sql.models.MetricName
    :ivar metric_values: Metric values for the specified time window and
     timestep.
    :vartype metric_values: list[~azure.mgmt.sql.models.MetricValue]
    """

    _validation = {
        'start_time': {'readonly': True},
        'end_time': {'readonly': True},
        'time_grain': {'readonly': True},
        'unit': {'readonly': True},
        'name': {'readonly': True},
        'metric_values': {'readonly': True},
    }

    _attribute_map = {
        'start_time': {'key': 'startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'endTime', 'type': 'iso-8601'},
        'time_grain': {'key': 'timeGrain', 'type': 'str'},
        'unit': {'key': 'unit', 'type': 'str'},
        'name': {'key': 'name', 'type': 'MetricName'},
        'metric_values': {'key': 'metricValues', 'type': '[MetricValue]'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        self.start_time = None
        self.end_time = None
        self.time_grain = None
        self.unit = None
        self.name = None
        self.metric_values = None
class MetricAvailability(Model):
    """Availability information for a metric.

    All attributes are populated by the service and are ignored when
    included in a request.

    :ivar retention: Retention length for the database metric.
    :vartype retention: str
    :ivar time_grain: Granularity of the database metric.
    :vartype time_grain: str
    """

    _validation = {
        'retention': {'readonly': True},
        'time_grain': {'readonly': True},
    }

    _attribute_map = {
        'retention': {'key': 'retention', 'type': 'str'},
        'time_grain': {'key': 'timeGrain', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        self.retention = None
        self.time_grain = None
class MetricDefinition(Model):
    """Definition of a database metric.

    All attributes are populated by the service and are ignored when
    included in a request.

    :ivar name: Name information for the metric.
    :vartype name: ~azure.mgmt.sql.models.MetricName
    :ivar primary_aggregation_type: Primary aggregation type defining how
     metric values are displayed. Possible values include: 'None',
     'Average', 'Count', 'Minimum', 'Maximum', 'Total'
    :vartype primary_aggregation_type: str or
     ~azure.mgmt.sql.models.PrimaryAggregationType
    :ivar resource_uri: Resource uri of the database.
    :vartype resource_uri: str
    :ivar unit: Unit of the metric. Possible values include: 'Count',
     'Bytes', 'Seconds', 'Percent', 'CountPerSecond', 'BytesPerSecond'
    :vartype unit: str or ~azure.mgmt.sql.models.UnitDefinitionType
    :ivar metric_availabilities: List of database metric availabilities
     for the metric.
    :vartype metric_availabilities:
     list[~azure.mgmt.sql.models.MetricAvailability]
    """

    _validation = {
        'name': {'readonly': True},
        'primary_aggregation_type': {'readonly': True},
        'resource_uri': {'readonly': True},
        'unit': {'readonly': True},
        'metric_availabilities': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'MetricName'},
        'primary_aggregation_type': {'key': 'primaryAggregationType', 'type': 'str'},
        'resource_uri': {'key': 'resourceUri', 'type': 'str'},
        'unit': {'key': 'unit', 'type': 'str'},
        'metric_availabilities': {'key': 'metricAvailabilities', 'type': '[MetricAvailability]'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        self.name = None
        self.primary_aggregation_type = None
        self.resource_uri = None
        self.unit = None
        self.metric_availabilities = None
class MetricName(Model):
    """Name of a database metric.

    All attributes are populated by the service and are ignored when
    included in a request.

    :ivar value: Name of the database metric.
    :vartype value: str
    :ivar localized_value: Friendly name of the database metric.
    :vartype localized_value: str
    """

    _validation = {
        'value': {'readonly': True},
        'localized_value': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': 'str'},
        'localized_value': {'key': 'localizedValue', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        self.value = None
        self.localized_value = None
class MetricValue(Model):
    """A single sampled database metric value.

    All attributes are populated by the service and are ignored when
    included in a request.

    :ivar count: Number of values for the metric.
    :vartype count: float
    :ivar average: Average value of the metric.
    :vartype average: float
    :ivar maximum: Max value of the metric.
    :vartype maximum: float
    :ivar minimum: Min value of the metric.
    :vartype minimum: float
    :ivar timestamp: Metric timestamp (ISO-8601 format).
    :vartype timestamp: datetime
    :ivar total: Total value of the metric.
    :vartype total: float
    """

    _validation = {
        'count': {'readonly': True},
        'average': {'readonly': True},
        'maximum': {'readonly': True},
        'minimum': {'readonly': True},
        'timestamp': {'readonly': True},
        'total': {'readonly': True},
    }

    _attribute_map = {
        'count': {'key': 'count', 'type': 'float'},
        'average': {'key': 'average', 'type': 'float'},
        'maximum': {'key': 'maximum', 'type': 'float'},
        'minimum': {'key': 'minimum', 'type': 'float'},
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'total': {'key': 'total', 'type': 'float'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        self.count = None
        self.average = None
        self.maximum = None
        self.minimum = None
        self.timestamp = None
        self.total = None
class Name(Model):
    """ARM Usage Name.

    :param value: Usage name value
    :type value: str
    :param localized_value: Usage name localized value.
    :type localized_value: str
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': 'str'},
        'localized_value': {'key': 'localizedValue', 'type': 'str'},
    }

    def __init__(self, *, value: str = None, localized_value: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.value = value
        self.localized_value = localized_value
class Operation(Model):
    """Definition of a SQL REST API operation.

    All attributes are populated by the service and are ignored when
    included in a request.

    :ivar name: Name of the operation being performed on this particular
     object.
    :vartype name: str
    :ivar display: Localized display information for this particular
     operation / action.
    :vartype display: ~azure.mgmt.sql.models.OperationDisplay
    :ivar origin: Intended executor of the operation. Possible values
     include: 'user', 'system'
    :vartype origin: str or ~azure.mgmt.sql.models.OperationOrigin
    :ivar properties: Additional descriptions for the operation.
    :vartype properties: dict[str, object]
    """

    _validation = {
        'name': {'readonly': True},
        'display': {'readonly': True},
        'origin': {'readonly': True},
        'properties': {'readonly': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display': {'key': 'display', 'type': 'OperationDisplay'},
        'origin': {'key': 'origin', 'type': 'str'},
        'properties': {'key': 'properties', 'type': '{object}'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        self.name = None
        self.display = None
        self.origin = None
        self.properties = None
class OperationDisplay(Model):
    """Display metadata associated with an operation.

    All attributes are populated by the service and are ignored when
    included in a request.

    :ivar provider: Localized friendly form of the resource provider name.
    :vartype provider: str
    :ivar resource: Localized friendly form of the resource type related
     to this action/operation.
    :vartype resource: str
    :ivar operation: Localized friendly name for the operation.
    :vartype operation: str
    :ivar description: Localized friendly description for the operation.
    :vartype description: str
    """

    _validation = {
        'provider': {'readonly': True},
        'resource': {'readonly': True},
        'operation': {'readonly': True},
        'description': {'readonly': True},
    }

    _attribute_map = {
        'provider': {'key': 'provider', 'type': 'str'},
        'resource': {'key': 'resource', 'type': 'str'},
        'operation': {'key': 'operation', 'type': 'str'},
        'description': {'key': 'description', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        self.provider = None
        self.resource = None
        self.operation = None
        self.description = None
class OperationImpact(Model):
    """The impact of an operation, both in absolute and relative terms.

    All fields are read-only: populated by the server and ignored on
    requests.

    :ivar name: The name of the impact dimension.
    :vartype name: str
    :ivar unit: The unit in which estimated impact to dimension is measured.
    :vartype unit: str
    :ivar change_value_absolute: The absolute impact to dimension.
    :vartype change_value_absolute: float
    :ivar change_value_relative: The relative impact to dimension (null if
     not applicable).
    :vartype change_value_relative: float
    """

    _validation = {
        "name": {"readonly": True},
        "unit": {"readonly": True},
        "change_value_absolute": {"readonly": True},
        "change_value_relative": {"readonly": True},
    }

    _attribute_map = {
        "name": {"key": "name", "type": "str"},
        "unit": {"key": "unit", "type": "str"},
        "change_value_absolute": {"key": "changeValueAbsolute", "type": "float"},
        "change_value_relative": {"key": "changeValueRelative", "type": "float"},
    }

    def __init__(self, **kwargs) -> None:
        super(OperationImpact, self).__init__(**kwargs)
        # Read-only metrics; set by the service on deserialization.
        self.name = None
        self.unit = None
        self.change_value_absolute = None
        self.change_value_relative = None
class PartnerInfo(Model):
    """Partner server information for the failover group.

    ``location`` and ``replication_role`` are read-only (server-populated);
    ``id`` is required and must be supplied by the caller.

    :param id: Required. Resource identifier of the partner server.
    :type id: str
    :ivar location: Geo location of the partner server.
    :vartype location: str
    :ivar replication_role: Replication role of the partner server. Possible
     values include: 'Primary', 'Secondary'
    :vartype replication_role: str or
     ~azure.mgmt.sql.models.FailoverGroupReplicationRole
    """

    _validation = {
        "id": {"required": True},
        "location": {"readonly": True},
        "replication_role": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "replication_role": {"key": "replicationRole", "type": "str"},
    }

    def __init__(self, *, id: str, **kwargs) -> None:
        super(PartnerInfo, self).__init__(**kwargs)
        self.id = id
        # Read-only; populated by the service.
        self.location = None
        self.replication_role = None
class PartnerRegionInfo(Model):
    """Partner region information for the failover group.

    ``replication_role`` is read-only and ignored when sending a request.

    :param location: Geo location of the partner managed instances.
    :type location: str
    :ivar replication_role: Replication role of the partner managed
     instances. Possible values include: 'Primary', 'Secondary'
    :vartype replication_role: str or
     ~azure.mgmt.sql.models.InstanceFailoverGroupReplicationRole
    """

    _validation = {
        "replication_role": {"readonly": True},
    }

    _attribute_map = {
        "location": {"key": "location", "type": "str"},
        "replication_role": {"key": "replicationRole", "type": "str"},
    }

    def __init__(self, *, location: str=None, **kwargs) -> None:
        super(PartnerRegionInfo, self).__init__(**kwargs)
        self.location = location
        # Read-only; populated by the service.
        self.replication_role = None
class PerformanceLevelCapability(Model):
    """The performance level capability.

    Both fields are read-only: populated by the server and ignored on
    requests.

    :ivar value: Performance level value.
    :vartype value: float
    :ivar unit: Unit type used to measure performance level. Possible values
     include: 'DTU', 'VCores'
    :vartype unit: str or ~azure.mgmt.sql.models.PerformanceLevelUnit
    """

    _validation = {
        "value": {"readonly": True},
        "unit": {"readonly": True},
    }

    _attribute_map = {
        "value": {"key": "value", "type": "float"},
        "unit": {"key": "unit", "type": "str"},
    }

    def __init__(self, **kwargs) -> None:
        super(PerformanceLevelCapability, self).__init__(**kwargs)
        # Read-only; set during deserialization of service responses.
        self.value = None
        self.unit = None
class PrivateEndpointConnection(ProxyResource):
    """A private endpoint connection.

    ``id``/``name``/``type``/``provisioning_state`` are read-only and
    server-populated; the two connection properties may be set by callers.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param private_endpoint: Private endpoint which the connection belongs
     to.
    :type private_endpoint: ~azure.mgmt.sql.models.PrivateEndpointProperty
    :param private_link_service_connection_state: Connection state of the
     private endpoint connection.
    :type private_link_service_connection_state:
     ~azure.mgmt.sql.models.PrivateLinkServiceConnectionStateProperty
    :ivar provisioning_state: State of the private endpoint connection.
    :vartype provisioning_state: str
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "provisioning_state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "private_endpoint": {"key": "properties.privateEndpoint", "type": "PrivateEndpointProperty"},
        "private_link_service_connection_state": {"key": "properties.privateLinkServiceConnectionState", "type": "PrivateLinkServiceConnectionStateProperty"},
        "provisioning_state": {"key": "properties.provisioningState", "type": "str"},
    }

    def __init__(self, *, private_endpoint=None, private_link_service_connection_state=None, **kwargs) -> None:
        super(PrivateEndpointConnection, self).__init__(**kwargs)
        self.private_endpoint = private_endpoint
        self.private_link_service_connection_state = private_link_service_connection_state
        # Read-only; populated by the service.
        self.provisioning_state = None
class PrivateEndpointProperty(Model):
    """Reference to a private endpoint by resource id.

    :param id: Resource id of the private endpoint.
    :type id: str
    """

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
    }

    def __init__(self, *, id: str=None, **kwargs) -> None:
        super(PrivateEndpointProperty, self).__init__(**kwargs)
        self.id = id
class PrivateLinkResource(ProxyResource):
    """A private link resource.

    Every field is read-only: populated by the server and ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar properties: The private link resource group id.
    :vartype properties: ~azure.mgmt.sql.models.PrivateLinkResourceProperties
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "properties": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "properties": {"key": "properties", "type": "PrivateLinkResourceProperties"},
    }

    def __init__(self, **kwargs) -> None:
        super(PrivateLinkResource, self).__init__(**kwargs)
        # Read-only; populated by the service.
        self.properties = None
class PrivateLinkResourceProperties(Model):
    """Properties of a private link resource.

    Both fields are read-only: populated by the server and ignored on
    requests.

    :ivar group_id: The private link resource group id.
    :vartype group_id: str
    :ivar required_members: The private link resource required member names.
    :vartype required_members: list[str]
    """

    _validation = {
        "group_id": {"readonly": True},
        "required_members": {"readonly": True},
    }

    _attribute_map = {
        "group_id": {"key": "groupId", "type": "str"},
        "required_members": {"key": "requiredMembers", "type": "[str]"},
    }

    def __init__(self, **kwargs) -> None:
        super(PrivateLinkResourceProperties, self).__init__(**kwargs)
        # Read-only; set during deserialization.
        self.group_id = None
        self.required_members = None
class PrivateLinkServiceConnectionStateProperty(Model):
    """Connection state of a private link service connection.

    ``status`` and ``description`` are required; ``actions_required`` is
    read-only and server-populated.

    :param status: Required. The private link service connection status.
    :type status: str
    :param description: Required. The private link service connection
     description.
    :type description: str
    :ivar actions_required: The actions required for private link service
     connection.
    :vartype actions_required: str
    """

    _validation = {
        "status": {"required": True},
        "description": {"required": True},
        "actions_required": {"readonly": True},
    }

    _attribute_map = {
        "status": {"key": "status", "type": "str"},
        "description": {"key": "description", "type": "str"},
        "actions_required": {"key": "actionsRequired", "type": "str"},
    }

    def __init__(self, *, status: str, description: str, **kwargs) -> None:
        super(PrivateLinkServiceConnectionStateProperty, self).__init__(**kwargs)
        self.status = status
        self.description = description
        # Read-only; populated by the service.
        self.actions_required = None
class RecommendedElasticPool(ProxyResource):
    """Represents a recommended elastic pool.

    The DTU/storage sizing parameters are settable; everything else is
    read-only, populated by the server and ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar database_edition: The edition of the recommended elastic pool. The
     ElasticPoolEdition enumeration contains all the valid editions. Possible
     values include: 'Basic', 'Standard', 'Premium', 'GeneralPurpose',
     'BusinessCritical'
    :vartype database_edition: str or
     ~azure.mgmt.sql.models.ElasticPoolEdition
    :param dtu: The DTU for the recommended elastic pool.
    :type dtu: float
    :param database_dtu_min: The minimum DTU for the database.
    :type database_dtu_min: float
    :param database_dtu_max: The maximum DTU for the database.
    :type database_dtu_max: float
    :param storage_mb: Gets storage size in megabytes.
    :type storage_mb: float
    :ivar observation_period_start: The observation period start (ISO8601
     format).
    :vartype observation_period_start: datetime
    :ivar observation_period_end: The observation period start (ISO8601
     format).
    :vartype observation_period_end: datetime
    :ivar max_observed_dtu: Gets maximum observed DTU.
    :vartype max_observed_dtu: float
    :ivar max_observed_storage_mb: Gets maximum observed storage in
     megabytes.
    :vartype max_observed_storage_mb: float
    :ivar databases: The list of databases in this pool. Expanded property
    :vartype databases: list[~azure.mgmt.sql.models.TrackedResource]
    :ivar metrics: The list of databases housed in the server. Expanded
     property
    :vartype metrics:
     list[~azure.mgmt.sql.models.RecommendedElasticPoolMetric]
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "database_edition": {"readonly": True},
        "observation_period_start": {"readonly": True},
        "observation_period_end": {"readonly": True},
        "max_observed_dtu": {"readonly": True},
        "max_observed_storage_mb": {"readonly": True},
        "databases": {"readonly": True},
        "metrics": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "database_edition": {"key": "properties.databaseEdition", "type": "str"},
        "dtu": {"key": "properties.dtu", "type": "float"},
        "database_dtu_min": {"key": "properties.databaseDtuMin", "type": "float"},
        "database_dtu_max": {"key": "properties.databaseDtuMax", "type": "float"},
        "storage_mb": {"key": "properties.storageMB", "type": "float"},
        "observation_period_start": {"key": "properties.observationPeriodStart", "type": "iso-8601"},
        "observation_period_end": {"key": "properties.observationPeriodEnd", "type": "iso-8601"},
        "max_observed_dtu": {"key": "properties.maxObservedDtu", "type": "float"},
        "max_observed_storage_mb": {"key": "properties.maxObservedStorageMB", "type": "float"},
        "databases": {"key": "properties.databases", "type": "[TrackedResource]"},
        "metrics": {"key": "properties.metrics", "type": "[RecommendedElasticPoolMetric]"},
    }

    def __init__(self, *, dtu: float=None, database_dtu_min: float=None, database_dtu_max: float=None, storage_mb: float=None, **kwargs) -> None:
        super(RecommendedElasticPool, self).__init__(**kwargs)
        # Caller-settable sizing parameters.
        self.dtu = dtu
        self.database_dtu_min = database_dtu_min
        self.database_dtu_max = database_dtu_max
        self.storage_mb = storage_mb
        # Read-only fields; populated by the service on deserialization.
        self.database_edition = None
        self.observation_period_start = None
        self.observation_period_end = None
        self.max_observed_dtu = None
        self.max_observed_storage_mb = None
        self.databases = None
        self.metrics = None
class RecommendedElasticPoolMetric(Model):
    """Represents recommended elastic pool metric.

    :param date_time_property: The time of metric (ISO8601 format).
    :type date_time_property: datetime
    :param dtu: Gets or sets the DTUs (Database Transaction Units). See
     https://azure.microsoft.com/documentation/articles/sql-database-what-is-a-dtu/
    :type dtu: float
    :param size_gb: Gets or sets size in gigabytes.
    :type size_gb: float
    """

    _attribute_map = {
        "date_time_property": {"key": "dateTime", "type": "iso-8601"},
        "dtu": {"key": "dtu", "type": "float"},
        "size_gb": {"key": "sizeGB", "type": "float"},
    }

    def __init__(self, *, date_time_property=None, dtu: float=None, size_gb: float=None, **kwargs) -> None:
        super(RecommendedElasticPoolMetric, self).__init__(**kwargs)
        self.date_time_property = date_time_property
        self.dtu = dtu
        self.size_gb = size_gb
class RecommendedIndex(ProxyResource):
    """Represents a database recommended index.

    Every property is read-only: populated by the server and ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar action: The proposed index action. You can create a missing index,
     drop an unused index, or rebuild an existing index to improve its
     performance. Possible values include: 'Create', 'Drop', 'Rebuild'
    :vartype action: str or ~azure.mgmt.sql.models.RecommendedIndexAction
    :ivar state: The current recommendation state. Possible values include:
     'Active', 'Pending', 'Executing', 'Verifying', 'Pending Revert',
     'Reverting', 'Reverted', 'Ignored', 'Expired', 'Blocked', 'Success'
    :vartype state: str or ~azure.mgmt.sql.models.RecommendedIndexState
    :ivar created: The UTC datetime showing when this resource was created
     (ISO8601 format).
    :vartype created: datetime
    :ivar last_modified: The UTC datetime of when was this resource last
     changed (ISO8601 format).
    :vartype last_modified: datetime
    :ivar index_type: The type of index (CLUSTERED, NONCLUSTERED,
     COLUMNSTORE, CLUSTERED COLUMNSTORE). Possible values include:
     'CLUSTERED', 'NONCLUSTERED', 'COLUMNSTORE', 'CLUSTERED COLUMNSTORE'
    :vartype index_type: str or ~azure.mgmt.sql.models.RecommendedIndexType
    :ivar schema: The schema where table to build index over resides
    :vartype schema: str
    :ivar table: The table on which to build index.
    :vartype table: str
    :ivar columns: Columns over which to build index
    :vartype columns: list[str]
    :ivar included_columns: The list of column names to be included in the
     index
    :vartype included_columns: list[str]
    :ivar index_script: The full build index script
    :vartype index_script: str
    :ivar estimated_impact: The estimated impact of doing recommended index
     action.
    :vartype estimated_impact: list[~azure.mgmt.sql.models.OperationImpact]
    :ivar reported_impact: The values reported after index action is
     complete.
    :vartype reported_impact: list[~azure.mgmt.sql.models.OperationImpact]
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "action": {"readonly": True},
        "state": {"readonly": True},
        "created": {"readonly": True},
        "last_modified": {"readonly": True},
        "index_type": {"readonly": True},
        "schema": {"readonly": True},
        "table": {"readonly": True},
        "columns": {"readonly": True},
        "included_columns": {"readonly": True},
        "index_script": {"readonly": True},
        "estimated_impact": {"readonly": True},
        "reported_impact": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "action": {"key": "properties.action", "type": "RecommendedIndexAction"},
        "state": {"key": "properties.state", "type": "RecommendedIndexState"},
        "created": {"key": "properties.created", "type": "iso-8601"},
        "last_modified": {"key": "properties.lastModified", "type": "iso-8601"},
        "index_type": {"key": "properties.indexType", "type": "RecommendedIndexType"},
        "schema": {"key": "properties.schema", "type": "str"},
        "table": {"key": "properties.table", "type": "str"},
        "columns": {"key": "properties.columns", "type": "[str]"},
        "included_columns": {"key": "properties.includedColumns", "type": "[str]"},
        "index_script": {"key": "properties.indexScript", "type": "str"},
        "estimated_impact": {"key": "properties.estimatedImpact", "type": "[OperationImpact]"},
        "reported_impact": {"key": "properties.reportedImpact", "type": "[OperationImpact]"},
    }

    def __init__(self, **kwargs) -> None:
        super(RecommendedIndex, self).__init__(**kwargs)
        # All read-only; populated by the service on deserialization.
        self.action = None
        self.state = None
        self.created = None
        self.last_modified = None
        self.index_type = None
        self.schema = None
        self.table = None
        self.columns = None
        self.included_columns = None
        self.index_script = None
        self.estimated_impact = None
        self.reported_impact = None
class RecoverableDatabase(ProxyResource):
    """A recoverable database.

    Every property is read-only: populated by the server and ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar edition: The edition of the database
    :vartype edition: str
    :ivar service_level_objective: The service level objective name of the
     database
    :vartype service_level_objective: str
    :ivar elastic_pool_name: The elastic pool name of the database
    :vartype elastic_pool_name: str
    :ivar last_available_backup_date: The last available backup date of the
     database (ISO8601 format)
    :vartype last_available_backup_date: datetime
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "edition": {"readonly": True},
        "service_level_objective": {"readonly": True},
        "elastic_pool_name": {"readonly": True},
        "last_available_backup_date": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "edition": {"key": "properties.edition", "type": "str"},
        "service_level_objective": {"key": "properties.serviceLevelObjective", "type": "str"},
        "elastic_pool_name": {"key": "properties.elasticPoolName", "type": "str"},
        "last_available_backup_date": {"key": "properties.lastAvailableBackupDate", "type": "iso-8601"},
    }

    def __init__(self, **kwargs) -> None:
        super(RecoverableDatabase, self).__init__(**kwargs)
        # Read-only; populated by the service.
        self.edition = None
        self.service_level_objective = None
        self.elastic_pool_name = None
        self.last_available_backup_date = None
class RecoverableManagedDatabase(ProxyResource):
    """A recoverable managed database resource.

    Every property is read-only: populated by the server and ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar last_available_backup_date: The last available backup date.
    :vartype last_available_backup_date: str
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "last_available_backup_date": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "last_available_backup_date": {"key": "properties.lastAvailableBackupDate", "type": "str"},
    }

    def __init__(self, **kwargs) -> None:
        super(RecoverableManagedDatabase, self).__init__(**kwargs)
        # Read-only; populated by the service.
        self.last_available_backup_date = None
class ReplicationLink(ProxyResource):
    """Represents a database replication link.

    Every property is read-only: populated by the server and ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Location of the server that contains this firewall rule.
    :vartype location: str
    :ivar is_termination_allowed: Legacy value indicating whether termination
     is allowed. Currently always returns true.
    :vartype is_termination_allowed: bool
    :ivar replication_mode: Replication mode of this replication link.
    :vartype replication_mode: str
    :ivar partner_server: The name of the server hosting the partner
     database.
    :vartype partner_server: str
    :ivar partner_database: The name of the partner database.
    :vartype partner_database: str
    :ivar partner_location: The Azure Region of the partner database.
    :vartype partner_location: str
    :ivar role: The role of the database in the replication link. Possible
     values include: 'Primary', 'Secondary', 'NonReadableSecondary',
     'Source', 'Copy'
    :vartype role: str or ~azure.mgmt.sql.models.ReplicationRole
    :ivar partner_role: The role of the partner database in the replication
     link. Possible values include: 'Primary', 'Secondary',
     'NonReadableSecondary', 'Source', 'Copy'
    :vartype partner_role: str or ~azure.mgmt.sql.models.ReplicationRole
    :ivar start_time: The start time for the replication link.
    :vartype start_time: datetime
    :ivar percent_complete: The percentage of seeding complete for the
     replication link.
    :vartype percent_complete: int
    :ivar replication_state: The replication state for the replication link.
     Possible values include: 'PENDING', 'SEEDING', 'CATCH_UP', 'SUSPENDED'
    :vartype replication_state: str or
     ~azure.mgmt.sql.models.ReplicationState
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "location": {"readonly": True},
        "is_termination_allowed": {"readonly": True},
        "replication_mode": {"readonly": True},
        "partner_server": {"readonly": True},
        "partner_database": {"readonly": True},
        "partner_location": {"readonly": True},
        "role": {"readonly": True},
        "partner_role": {"readonly": True},
        "start_time": {"readonly": True},
        "percent_complete": {"readonly": True},
        "replication_state": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "is_termination_allowed": {"key": "properties.isTerminationAllowed", "type": "bool"},
        "replication_mode": {"key": "properties.replicationMode", "type": "str"},
        "partner_server": {"key": "properties.partnerServer", "type": "str"},
        "partner_database": {"key": "properties.partnerDatabase", "type": "str"},
        "partner_location": {"key": "properties.partnerLocation", "type": "str"},
        "role": {"key": "properties.role", "type": "ReplicationRole"},
        "partner_role": {"key": "properties.partnerRole", "type": "ReplicationRole"},
        "start_time": {"key": "properties.startTime", "type": "iso-8601"},
        "percent_complete": {"key": "properties.percentComplete", "type": "int"},
        "replication_state": {"key": "properties.replicationState", "type": "str"},
    }

    def __init__(self, **kwargs) -> None:
        super(ReplicationLink, self).__init__(**kwargs)
        # All read-only; populated by the service on deserialization.
        self.location = None
        self.is_termination_allowed = None
        self.replication_mode = None
        self.partner_server = None
        self.partner_database = None
        self.partner_location = None
        self.role = None
        self.partner_role = None
        self.start_time = None
        self.percent_complete = None
        self.replication_state = None
class ResourceIdentity(Model):
    """Azure Active Directory identity configuration for a resource.

    ``principal_id`` and ``tenant_id`` are read-only (server-populated);
    only ``type`` may be set by callers.

    :ivar principal_id: The Azure Active Directory principal id.
    :vartype principal_id: str
    :param type: The identity type. Set this to 'SystemAssigned' in order to
     automatically create and assign an Azure Active Directory principal for
     the resource. Possible values include: 'SystemAssigned'
    :type type: str or ~azure.mgmt.sql.models.IdentityType
    :ivar tenant_id: The Azure Active Directory tenant id.
    :vartype tenant_id: str
    """

    _validation = {
        "principal_id": {"readonly": True},
        "tenant_id": {"readonly": True},
    }

    _attribute_map = {
        "principal_id": {"key": "principalId", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "tenant_id": {"key": "tenantId", "type": "str"},
    }

    def __init__(self, *, type=None, **kwargs) -> None:
        super(ResourceIdentity, self).__init__(**kwargs)
        self.type = type
        # Read-only; populated by the service.
        self.principal_id = None
        self.tenant_id = None
class ResourceMoveDefinition(Model):
    """Contains the information necessary to perform a resource move (rename).

    :param id: Required. The target ID for the resource
    :type id: str
    """

    _validation = {
        "id": {"required": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
    }

    def __init__(self, *, id: str, **kwargs) -> None:
        super(ResourceMoveDefinition, self).__init__(**kwargs)
        self.id = id
class RestorableDroppedDatabase(ProxyResource):
    """A restorable dropped database.

    Every property is read-only: populated by the server and ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: The geo-location where the resource lives
    :vartype location: str
    :ivar database_name: The name of the database
    :vartype database_name: str
    :ivar edition: The edition of the database
    :vartype edition: str
    :ivar max_size_bytes: The max size in bytes of the database
    :vartype max_size_bytes: str
    :ivar service_level_objective: The service level objective name of the
     database
    :vartype service_level_objective: str
    :ivar elastic_pool_name: The elastic pool name of the database
    :vartype elastic_pool_name: str
    :ivar creation_date: The creation date of the database (ISO8601 format)
    :vartype creation_date: datetime
    :ivar deletion_date: The deletion date of the database (ISO8601 format)
    :vartype deletion_date: datetime
    :ivar earliest_restore_date: The earliest restore date of the database
     (ISO8601 format)
    :vartype earliest_restore_date: datetime
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "location": {"readonly": True},
        "database_name": {"readonly": True},
        "edition": {"readonly": True},
        "max_size_bytes": {"readonly": True},
        "service_level_objective": {"readonly": True},
        "elastic_pool_name": {"readonly": True},
        "creation_date": {"readonly": True},
        "deletion_date": {"readonly": True},
        "earliest_restore_date": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "database_name": {"key": "properties.databaseName", "type": "str"},
        "edition": {"key": "properties.edition", "type": "str"},
        "max_size_bytes": {"key": "properties.maxSizeBytes", "type": "str"},
        "service_level_objective": {"key": "properties.serviceLevelObjective", "type": "str"},
        "elastic_pool_name": {"key": "properties.elasticPoolName", "type": "str"},
        "creation_date": {"key": "properties.creationDate", "type": "iso-8601"},
        "deletion_date": {"key": "properties.deletionDate", "type": "iso-8601"},
        "earliest_restore_date": {"key": "properties.earliestRestoreDate", "type": "iso-8601"},
    }

    def __init__(self, **kwargs) -> None:
        super(RestorableDroppedDatabase, self).__init__(**kwargs)
        # All read-only; populated by the service on deserialization.
        self.location = None
        self.database_name = None
        self.edition = None
        self.max_size_bytes = None
        self.service_level_objective = None
        self.elastic_pool_name = None
        self.creation_date = None
        self.deletion_date = None
        self.earliest_restore_date = None
class RestorableDroppedManagedDatabase(TrackedResource):
    """A restorable dropped managed database resource.

    ``location`` is required; the database name and the date fields are
    read-only, populated by the server and ignored when sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: Required. Resource location.
    :type location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :ivar database_name: The name of the database.
    :vartype database_name: str
    :ivar creation_date: The creation date of the database (ISO8601 format).
    :vartype creation_date: datetime
    :ivar deletion_date: The deletion date of the database (ISO8601 format).
    :vartype deletion_date: datetime
    :ivar earliest_restore_date: The earliest restore date of the database
     (ISO8601 format).
    :vartype earliest_restore_date: datetime
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "location": {"required": True},
        "database_name": {"readonly": True},
        "creation_date": {"readonly": True},
        "deletion_date": {"readonly": True},
        "earliest_restore_date": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "tags": {"key": "tags", "type": "{str}"},
        "database_name": {"key": "properties.databaseName", "type": "str"},
        "creation_date": {"key": "properties.creationDate", "type": "iso-8601"},
        "deletion_date": {"key": "properties.deletionDate", "type": "iso-8601"},
        "earliest_restore_date": {"key": "properties.earliestRestoreDate", "type": "iso-8601"},
    }

    def __init__(self, *, location: str, tags=None, **kwargs) -> None:
        # location/tags are handled by the TrackedResource base class.
        super(RestorableDroppedManagedDatabase, self).__init__(location=location, tags=tags, **kwargs)
        # Read-only; populated by the service.
        self.database_name = None
        self.creation_date = None
        self.deletion_date = None
        self.earliest_restore_date = None
class RestorePoint(ProxyResource):
    """Database restore points.

    Every property is read-only: populated by the server and ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar restore_point_type: The type of restore point. Possible values
     include: 'CONTINUOUS', 'DISCRETE'
    :vartype restore_point_type: str or
     ~azure.mgmt.sql.models.RestorePointType
    :ivar earliest_restore_date: The earliest time to which this database
     can be restored
    :vartype earliest_restore_date: datetime
    :ivar restore_point_creation_date: The time the backup was taken
    :vartype restore_point_creation_date: datetime
    :ivar restore_point_label: The label of restore point for backup request
     by user
    :vartype restore_point_label: str
    """

    _validation = {
        "id": {"readonly": True},
        "name": {"readonly": True},
        "type": {"readonly": True},
        "location": {"readonly": True},
        "restore_point_type": {"readonly": True},
        "earliest_restore_date": {"readonly": True},
        "restore_point_creation_date": {"readonly": True},
        "restore_point_label": {"readonly": True},
    }

    _attribute_map = {
        "id": {"key": "id", "type": "str"},
        "name": {"key": "name", "type": "str"},
        "type": {"key": "type", "type": "str"},
        "location": {"key": "location", "type": "str"},
        "restore_point_type": {"key": "properties.restorePointType", "type": "RestorePointType"},
        "earliest_restore_date": {"key": "properties.earliestRestoreDate", "type": "iso-8601"},
        "restore_point_creation_date": {"key": "properties.restorePointCreationDate", "type": "iso-8601"},
        "restore_point_label": {"key": "properties.restorePointLabel", "type": "str"},
    }

    def __init__(self, **kwargs) -> None:
        super(RestorePoint, self).__init__(**kwargs)
        # All read-only; populated by the service on deserialization.
        self.location = None
        self.restore_point_type = None
        self.earliest_restore_date = None
        self.restore_point_creation_date = None
        self.restore_point_label = None
class SensitivityLabel(ProxyResource):
    """A sensitivity label.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param label_name: The label name.
    :type label_name: str
    :param label_id: The label ID.
    :type label_id: str
    :param information_type: The information type.
    :type information_type: str
    :param information_type_id: The information type ID.
    :type information_type_id: str
    :ivar is_disabled: Is sensitivity recommendation disabled. Applicable for
    recommended sensitivity label only. Specifies whether the sensitivity
    recommendation on this column is disabled (dismissed) or not.
    :vartype is_disabled: bool
    """
    # msrest metadata: 'readonly' fields are populated only by the service.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'is_disabled': {'readonly': True},
    }
    # msrest metadata: Python attribute -> JSON key path and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'label_name': {'key': 'properties.labelName', 'type': 'str'},
        'label_id': {'key': 'properties.labelId', 'type': 'str'},
        'information_type': {'key': 'properties.informationType', 'type': 'str'},
        'information_type_id': {'key': 'properties.informationTypeId', 'type': 'str'},
        'is_disabled': {'key': 'properties.isDisabled', 'type': 'bool'},
    }
    def __init__(self, *, label_name: str=None, label_id: str=None, information_type: str=None, information_type_id: str=None, **kwargs) -> None:
        super(SensitivityLabel, self).__init__(**kwargs)
        self.label_name = label_name
        self.label_id = label_id
        self.information_type = information_type
        self.information_type_id = information_type_id
        # Read-only; left as None until deserialized from a service response.
        self.is_disabled = None
class Server(TrackedResource):
    """An Azure SQL Database server.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: Required. Resource location.
    :type location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param identity: The Azure Active Directory identity of the server.
    :type identity: ~azure.mgmt.sql.models.ResourceIdentity
    :ivar kind: Kind of sql server. This is metadata used for the Azure portal
    experience.
    :vartype kind: str
    :param administrator_login: Administrator username for the server. Once
    created it cannot be changed.
    :type administrator_login: str
    :param administrator_login_password: The administrator login password
    (required for server creation).
    :type administrator_login_password: str
    :param version: The version of the server.
    :type version: str
    :ivar state: The state of the server.
    :vartype state: str
    :ivar fully_qualified_domain_name: The fully qualified domain name of the
    server.
    :vartype fully_qualified_domain_name: str
    """
    # msrest metadata: 'readonly' fields are populated only by the service;
    # 'required' fields are validated before a request is sent.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
        'kind': {'readonly': True},
        'state': {'readonly': True},
        'fully_qualified_domain_name': {'readonly': True},
    }
    # msrest metadata: Python attribute -> JSON key path and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'identity': {'key': 'identity', 'type': 'ResourceIdentity'},
        'kind': {'key': 'kind', 'type': 'str'},
        'administrator_login': {'key': 'properties.administratorLogin', 'type': 'str'},
        'administrator_login_password': {'key': 'properties.administratorLoginPassword', 'type': 'str'},
        'version': {'key': 'properties.version', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'str'},
        'fully_qualified_domain_name': {'key': 'properties.fullyQualifiedDomainName', 'type': 'str'},
    }
    def __init__(self, *, location: str, tags=None, identity=None, administrator_login: str=None, administrator_login_password: str=None, version: str=None, **kwargs) -> None:
        # location/tags are handled by the TrackedResource base class.
        super(Server, self).__init__(location=location, tags=tags, **kwargs)
        self.identity = identity
        # kind/state/fully_qualified_domain_name are read-only; left as None
        # until deserialized from a service response.
        self.kind = None
        self.administrator_login = administrator_login
        self.administrator_login_password = administrator_login_password
        self.version = version
        self.state = None
        self.fully_qualified_domain_name = None
class ServerAutomaticTuning(ProxyResource):
    """Server-level Automatic Tuning.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param desired_state: Automatic tuning desired state. Possible values
    include: 'Custom', 'Auto', 'Unspecified'
    :type desired_state: str or
    ~azure.mgmt.sql.models.AutomaticTuningServerMode
    :ivar actual_state: Automatic tuning actual state. Possible values
    include: 'Custom', 'Auto', 'Unspecified'
    :vartype actual_state: str or
    ~azure.mgmt.sql.models.AutomaticTuningServerMode
    :param options: Automatic tuning options definition.
    :type options: dict[str,
    ~azure.mgmt.sql.models.AutomaticTuningServerOptions]
    """
    # msrest metadata: 'readonly' fields are populated only by the service.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'actual_state': {'readonly': True},
    }
    # msrest metadata: Python attribute -> JSON key path and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'desired_state': {'key': 'properties.desiredState', 'type': 'AutomaticTuningServerMode'},
        'actual_state': {'key': 'properties.actualState', 'type': 'AutomaticTuningServerMode'},
        'options': {'key': 'properties.options', 'type': '{AutomaticTuningServerOptions}'},
    }
    def __init__(self, *, desired_state=None, options=None, **kwargs) -> None:
        super(ServerAutomaticTuning, self).__init__(**kwargs)
        self.desired_state = desired_state
        # Read-only; left as None until deserialized from a service response.
        self.actual_state = None
        self.options = options
class ServerAzureADAdministrator(ProxyResource):
    """An server Active Directory Administrator.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar administrator_type: Required. The type of administrator. Default
    value: "ActiveDirectory" .
    :vartype administrator_type: str
    :param login: Required. The server administrator login value.
    :type login: str
    :param sid: Required. The server administrator Sid (Secure ID).
    :type sid: str
    :param tenant_id: Required. The server Active Directory Administrator
    tenant id.
    :type tenant_id: str
    """
    # msrest metadata: 'readonly' fields come from the service; 'required'
    # fields are validated; 'constant' fields always serialize a fixed value.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'administrator_type': {'required': True, 'constant': True},
        'login': {'required': True},
        'sid': {'required': True},
        'tenant_id': {'required': True},
    }
    # msrest metadata: Python attribute -> JSON key path and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'administrator_type': {'key': 'properties.administratorType', 'type': 'str'},
        'login': {'key': 'properties.login', 'type': 'str'},
        'sid': {'key': 'properties.sid', 'type': 'str'},
        'tenant_id': {'key': 'properties.tenantId', 'type': 'str'},
    }
    # Class-level constant (see 'constant' in _validation): not an __init__
    # parameter; always sent as "ActiveDirectory".
    administrator_type = "ActiveDirectory"
    def __init__(self, *, login: str, sid: str, tenant_id: str, **kwargs) -> None:
        super(ServerAzureADAdministrator, self).__init__(**kwargs)
        self.login = login
        self.sid = sid
        self.tenant_id = tenant_id
class ServerBlobAuditingPolicy(ProxyResource):
    """A server blob auditing policy.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param state: Required. Specifies the state of the policy. If state is
    Enabled, storageEndpoint or isAzureMonitorTargetEnabled are required.
    Possible values include: 'Enabled', 'Disabled'
    :type state: str or ~azure.mgmt.sql.models.BlobAuditingPolicyState
    :param storage_endpoint: Specifies the blob storage endpoint (e.g.
    https://MyAccount.blob.core.windows.net). If state is Enabled,
    storageEndpoint is required.
    :type storage_endpoint: str
    :param storage_account_access_key: Specifies the identifier key of the
    auditing storage account. If state is Enabled and storageEndpoint is
    specified, storageAccountAccessKey is required.
    :type storage_account_access_key: str
    :param retention_days: Specifies the number of days to keep in the audit
    logs in the storage account.
    :type retention_days: int
    :param audit_actions_and_groups: Specifies the Actions-Groups and Actions
    to audit.
    The recommended set of action groups to use is the following combination -
    this will audit all the queries and stored procedures executed against the
    database, as well as successful and failed logins:
    BATCH_COMPLETED_GROUP,
    SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP,
    FAILED_DATABASE_AUTHENTICATION_GROUP.
    This above combination is also the set that is configured by default when
    enabling auditing from the Azure portal.
    The supported action groups to audit are (note: choose only specific
    groups that cover your auditing needs. Using unnecessary groups could lead
    to very large quantities of audit records):
    APPLICATION_ROLE_CHANGE_PASSWORD_GROUP
    BACKUP_RESTORE_GROUP
    DATABASE_LOGOUT_GROUP
    DATABASE_OBJECT_CHANGE_GROUP
    DATABASE_OBJECT_OWNERSHIP_CHANGE_GROUP
    DATABASE_OBJECT_PERMISSION_CHANGE_GROUP
    DATABASE_OPERATION_GROUP
    DATABASE_PERMISSION_CHANGE_GROUP
    DATABASE_PRINCIPAL_CHANGE_GROUP
    DATABASE_PRINCIPAL_IMPERSONATION_GROUP
    DATABASE_ROLE_MEMBER_CHANGE_GROUP
    FAILED_DATABASE_AUTHENTICATION_GROUP
    SCHEMA_OBJECT_ACCESS_GROUP
    SCHEMA_OBJECT_CHANGE_GROUP
    SCHEMA_OBJECT_OWNERSHIP_CHANGE_GROUP
    SCHEMA_OBJECT_PERMISSION_CHANGE_GROUP
    SUCCESSFUL_DATABASE_AUTHENTICATION_GROUP
    USER_CHANGE_PASSWORD_GROUP
    BATCH_STARTED_GROUP
    BATCH_COMPLETED_GROUP
    These are groups that cover all sql statements and stored procedures
    executed against the database, and should not be used in combination with
    other groups as this will result in duplicate audit logs.
    For more information, see [Database-Level Audit Action
    Groups](https://docs.microsoft.com/en-us/sql/relational-databases/security/auditing/sql-server-audit-action-groups-and-actions#database-level-audit-action-groups).
    For Database auditing policy, specific Actions can also be specified (note
    that Actions cannot be specified for Server auditing policy). The
    supported actions to audit are:
    SELECT
    UPDATE
    INSERT
    DELETE
    EXECUTE
    RECEIVE
    REFERENCES
    The general form for defining an action to be audited is:
    {action} ON {object} BY {principal}
    Note that <object> in the above format can refer to an object like a
    table, view, or stored procedure, or an entire database or schema. For the
    latter cases, the forms DATABASE::{db_name} and SCHEMA::{schema_name} are
    used, respectively.
    For example:
    SELECT on dbo.myTable by public
    SELECT on DATABASE::myDatabase by public
    SELECT on SCHEMA::mySchema by public
    For more information, see [Database-Level Audit
    Actions](https://docs.microsoft.com/en-us/sql/relational-databases/security/auditing/sql-server-audit-action-groups-and-actions#database-level-audit-actions)
    :type audit_actions_and_groups: list[str]
    :param storage_account_subscription_id: Specifies the blob storage
    subscription Id.
    :type storage_account_subscription_id: str
    :param is_storage_secondary_key_in_use: Specifies whether
    storageAccountAccessKey value is the storage's secondary key.
    :type is_storage_secondary_key_in_use: bool
    :param is_azure_monitor_target_enabled: Specifies whether audit events are
    sent to Azure Monitor.
    In order to send the events to Azure Monitor, specify 'state' as 'Enabled'
    and 'isAzureMonitorTargetEnabled' as true.
    When using REST API to configure auditing, Diagnostic Settings with
    'SQLSecurityAuditEvents' diagnostic logs category on the database should
    be also created.
    Note that for server level audit you should use the 'master' database as
    {databaseName}.
    Diagnostic Settings URI format:
    PUT
    https://management.azure.com/subscriptions/{subscriptionId}/resourceGroups/{resourceGroup}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/providers/microsoft.insights/diagnosticSettings/{settingsName}?api-version=2017-05-01-preview
    For more information, see [Diagnostic Settings REST
    API](https://go.microsoft.com/fwlink/?linkid=2033207)
    or [Diagnostic Settings
    PowerShell](https://go.microsoft.com/fwlink/?linkid=2033043)
    :type is_azure_monitor_target_enabled: bool
    """
    # msrest metadata: 'readonly' fields come from the service; 'required'
    # fields are validated before a request is sent.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'state': {'required': True},
    }
    # msrest metadata: Python attribute -> JSON key path and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'BlobAuditingPolicyState'},
        'storage_endpoint': {'key': 'properties.storageEndpoint', 'type': 'str'},
        'storage_account_access_key': {'key': 'properties.storageAccountAccessKey', 'type': 'str'},
        'retention_days': {'key': 'properties.retentionDays', 'type': 'int'},
        'audit_actions_and_groups': {'key': 'properties.auditActionsAndGroups', 'type': '[str]'},
        'storage_account_subscription_id': {'key': 'properties.storageAccountSubscriptionId', 'type': 'str'},
        'is_storage_secondary_key_in_use': {'key': 'properties.isStorageSecondaryKeyInUse', 'type': 'bool'},
        'is_azure_monitor_target_enabled': {'key': 'properties.isAzureMonitorTargetEnabled', 'type': 'bool'},
    }
    def __init__(self, *, state, storage_endpoint: str=None, storage_account_access_key: str=None, retention_days: int=None, audit_actions_and_groups=None, storage_account_subscription_id: str=None, is_storage_secondary_key_in_use: bool=None, is_azure_monitor_target_enabled: bool=None, **kwargs) -> None:
        super(ServerBlobAuditingPolicy, self).__init__(**kwargs)
        self.state = state
        self.storage_endpoint = storage_endpoint
        self.storage_account_access_key = storage_account_access_key
        self.retention_days = retention_days
        self.audit_actions_and_groups = audit_actions_and_groups
        self.storage_account_subscription_id = storage_account_subscription_id
        self.is_storage_secondary_key_in_use = is_storage_secondary_key_in_use
        self.is_azure_monitor_target_enabled = is_azure_monitor_target_enabled
class ServerCommunicationLink(ProxyResource):
    """Server communication link.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar state: The state.
    :vartype state: str
    :param partner_server: Required. The name of the partner server.
    :type partner_server: str
    :ivar location: Communication link location.
    :vartype location: str
    :ivar kind: Communication link kind. This property is used for Azure
    Portal metadata.
    :vartype kind: str
    """
    # msrest metadata: 'readonly' fields come from the service; 'required'
    # fields are validated before a request is sent.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'state': {'readonly': True},
        'partner_server': {'required': True},
        'location': {'readonly': True},
        'kind': {'readonly': True},
    }
    # msrest metadata: Python attribute -> JSON key path and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'str'},
        'partner_server': {'key': 'properties.partnerServer', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
    }
    def __init__(self, *, partner_server: str, **kwargs) -> None:
        super(ServerCommunicationLink, self).__init__(**kwargs)
        # state/location/kind are read-only; left as None until deserialized
        # from a service response.
        self.state = None
        self.partner_server = partner_server
        self.location = None
        self.kind = None
class ServerConnectionPolicy(ProxyResource):
    """A server secure connection policy.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar kind: Metadata used for the Azure portal experience.
    :vartype kind: str
    :ivar location: Resource location.
    :vartype location: str
    :param connection_type: Required. The server connection type. Possible
    values include: 'Default', 'Proxy', 'Redirect'
    :type connection_type: str or ~azure.mgmt.sql.models.ServerConnectionType
    """
    # msrest metadata: 'readonly' fields come from the service; 'required'
    # fields are validated before a request is sent.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'kind': {'readonly': True},
        'location': {'readonly': True},
        'connection_type': {'required': True},
    }
    # msrest metadata: Python attribute -> JSON key path and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'connection_type': {'key': 'properties.connectionType', 'type': 'ServerConnectionType'},
    }
    def __init__(self, *, connection_type, **kwargs) -> None:
        super(ServerConnectionPolicy, self).__init__(**kwargs)
        # kind/location are read-only; left as None until deserialized.
        self.kind = None
        self.location = None
        self.connection_type = connection_type
class ServerDnsAlias(ProxyResource):
    """A server DNS alias.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar azure_dns_record: The fully qualified DNS record for alias
    :vartype azure_dns_record: str
    """
    # msrest metadata: 'readonly' fields are populated only by the service.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'azure_dns_record': {'readonly': True},
    }
    # msrest metadata: Python attribute -> JSON key path and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'azure_dns_record': {'key': 'properties.azureDnsRecord', 'type': 'str'},
    }
    def __init__(self, **kwargs) -> None:
        super(ServerDnsAlias, self).__init__(**kwargs)
        # Read-only; left as None until deserialized from a service response.
        self.azure_dns_record = None
class ServerDnsAliasAcquisition(Model):
    """Request body used to acquire an existing server DNS alias.
    :param old_server_dns_alias_id: The id of the server alias that will be
    acquired to point to this server instead.
    :type old_server_dns_alias_id: str
    """
    # msrest metadata: Python attribute -> JSON key and wire type.
    _attribute_map = {
        'old_server_dns_alias_id': {'key': 'oldServerDnsAliasId', 'type': 'str'},
    }
    def __init__(self, *, old_server_dns_alias_id: str=None, **kwargs) -> None:
        """Build the acquisition request, optionally pointing at an alias id."""
        super(ServerDnsAliasAcquisition, self).__init__(**kwargs)
        self.old_server_dns_alias_id = old_server_dns_alias_id
class ServerKey(ProxyResource):
    """A server key.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param kind: Kind of encryption protector. This is metadata used for the
    Azure portal experience.
    :type kind: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar subregion: Subregion of the server key.
    :vartype subregion: str
    :param server_key_type: Required. The server key type like
    'ServiceManaged', 'AzureKeyVault'. Possible values include:
    'ServiceManaged', 'AzureKeyVault'
    :type server_key_type: str or ~azure.mgmt.sql.models.ServerKeyType
    :param uri: The URI of the server key.
    :type uri: str
    :param thumbprint: Thumbprint of the server key.
    :type thumbprint: str
    :param creation_date: The server key creation date.
    :type creation_date: datetime
    """
    # msrest metadata: 'readonly' fields come from the service; 'required'
    # fields are validated before a request is sent.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'readonly': True},
        'subregion': {'readonly': True},
        'server_key_type': {'required': True},
    }
    # msrest metadata: Python attribute -> JSON key path and wire type.
    # NOTE(review): server_key_type is mapped with wire type 'str' even though
    # the docstring names the ServerKeyType enum; other classes in this file
    # map enums by their type name. This matches the generated SDK — confirm
    # it is intentional before changing.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'kind': {'key': 'kind', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'subregion': {'key': 'properties.subregion', 'type': 'str'},
        'server_key_type': {'key': 'properties.serverKeyType', 'type': 'str'},
        'uri': {'key': 'properties.uri', 'type': 'str'},
        'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
        'creation_date': {'key': 'properties.creationDate', 'type': 'iso-8601'},
    }
    def __init__(self, *, server_key_type, kind: str=None, uri: str=None, thumbprint: str=None, creation_date=None, **kwargs) -> None:
        super(ServerKey, self).__init__(**kwargs)
        self.kind = kind
        # location/subregion are read-only; left as None until deserialized.
        self.location = None
        self.subregion = None
        self.server_key_type = server_key_type
        self.uri = uri
        self.thumbprint = thumbprint
        self.creation_date = creation_date
class ServerSecurityAlertPolicy(ProxyResource):
    """A server security alert policy.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param state: Required. Specifies the state of the policy, whether it is
    enabled or disabled or a policy has not been applied yet on the specific
    database. Possible values include: 'New', 'Enabled', 'Disabled'
    :type state: str or ~azure.mgmt.sql.models.SecurityAlertPolicyState
    :param disabled_alerts: Specifies an array of alerts that are disabled.
    Allowed values are: Sql_Injection, Sql_Injection_Vulnerability,
    Access_Anomaly, Data_Exfiltration, Unsafe_Action
    :type disabled_alerts: list[str]
    :param email_addresses: Specifies an array of e-mail addresses to which
    the alert is sent.
    :type email_addresses: list[str]
    :param email_account_admins: Specifies that the alert is sent to the
    account administrators.
    :type email_account_admins: bool
    :param storage_endpoint: Specifies the blob storage endpoint (e.g.
    https://MyAccount.blob.core.windows.net). This blob storage will hold all
    Threat Detection audit logs.
    :type storage_endpoint: str
    :param storage_account_access_key: Specifies the identifier key of the
    Threat Detection audit storage account.
    :type storage_account_access_key: str
    :param retention_days: Specifies the number of days to keep in the Threat
    Detection audit logs.
    :type retention_days: int
    :ivar creation_time: Specifies the UTC creation time of the policy.
    :vartype creation_time: datetime
    """
    # msrest metadata: 'readonly' fields come from the service; 'required'
    # fields are validated before a request is sent.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'state': {'required': True},
        'creation_time': {'readonly': True},
    }
    # msrest metadata: Python attribute -> JSON key path and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'SecurityAlertPolicyState'},
        'disabled_alerts': {'key': 'properties.disabledAlerts', 'type': '[str]'},
        'email_addresses': {'key': 'properties.emailAddresses', 'type': '[str]'},
        'email_account_admins': {'key': 'properties.emailAccountAdmins', 'type': 'bool'},
        'storage_endpoint': {'key': 'properties.storageEndpoint', 'type': 'str'},
        'storage_account_access_key': {'key': 'properties.storageAccountAccessKey', 'type': 'str'},
        'retention_days': {'key': 'properties.retentionDays', 'type': 'int'},
        'creation_time': {'key': 'properties.creationTime', 'type': 'iso-8601'},
    }
    def __init__(self, *, state, disabled_alerts=None, email_addresses=None, email_account_admins: bool=None, storage_endpoint: str=None, storage_account_access_key: str=None, retention_days: int=None, **kwargs) -> None:
        super(ServerSecurityAlertPolicy, self).__init__(**kwargs)
        self.state = state
        self.disabled_alerts = disabled_alerts
        self.email_addresses = email_addresses
        self.email_account_admins = email_account_admins
        self.storage_endpoint = storage_endpoint
        self.storage_account_access_key = storage_account_access_key
        self.retention_days = retention_days
        # Read-only; left as None until deserialized from a service response.
        self.creation_time = None
class ServerUpdate(Model):
    """An update request for an Azure SQL Database server.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :param administrator_login: Administrator username for the server. Once
    created it cannot be changed.
    :type administrator_login: str
    :param administrator_login_password: The administrator login password
    (required for server creation).
    :type administrator_login_password: str
    :param version: The version of the server.
    :type version: str
    :ivar state: The state of the server.
    :vartype state: str
    :ivar fully_qualified_domain_name: The fully qualified domain name of the
    server.
    :vartype fully_qualified_domain_name: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    """
    # msrest metadata: 'readonly' fields are populated only by the service.
    _validation = {
        'state': {'readonly': True},
        'fully_qualified_domain_name': {'readonly': True},
    }
    # msrest metadata: Python attribute -> JSON key path and wire type.
    _attribute_map = {
        'administrator_login': {'key': 'properties.administratorLogin', 'type': 'str'},
        'administrator_login_password': {'key': 'properties.administratorLoginPassword', 'type': 'str'},
        'version': {'key': 'properties.version', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'str'},
        'fully_qualified_domain_name': {'key': 'properties.fullyQualifiedDomainName', 'type': 'str'},
    }
    def __init__(self, *, administrator_login: str=None, administrator_login_password: str=None, version: str=None, tags=None, **kwargs) -> None:
        super(ServerUpdate, self).__init__(**kwargs)
        self.administrator_login = administrator_login
        self.administrator_login_password = administrator_login_password
        self.version = version
        # state/fully_qualified_domain_name are read-only; left as None until
        # deserialized from a service response.
        self.state = None
        self.fully_qualified_domain_name = None
        self.tags = tags
class ServerUsage(Model):
    """Represents server metrics.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar name: Name of the server usage metric.
    :vartype name: str
    :ivar resource_name: The name of the resource.
    :vartype resource_name: str
    :ivar display_name: The metric display name.
    :vartype display_name: str
    :ivar current_value: The current value of the metric.
    :vartype current_value: float
    :ivar limit: The current limit of the metric.
    :vartype limit: float
    :ivar unit: The units of the metric.
    :vartype unit: str
    :ivar next_reset_time: The next reset time for the metric (ISO8601
    format).
    :vartype next_reset_time: datetime
    """
    # msrest metadata: every field is read-only (service-populated).
    _validation = {
        'name': {'readonly': True},
        'resource_name': {'readonly': True},
        'display_name': {'readonly': True},
        'current_value': {'readonly': True},
        'limit': {'readonly': True},
        'unit': {'readonly': True},
        'next_reset_time': {'readonly': True},
    }
    # msrest metadata: Python attribute -> JSON key and wire type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'resource_name': {'key': 'resourceName', 'type': 'str'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'current_value': {'key': 'currentValue', 'type': 'float'},
        'limit': {'key': 'limit', 'type': 'float'},
        'unit': {'key': 'unit', 'type': 'str'},
        'next_reset_time': {'key': 'nextResetTime', 'type': 'iso-8601'},
    }
    def __init__(self, **kwargs) -> None:
        super(ServerUsage, self).__init__(**kwargs)
        # All fields are read-only: left as None until deserialized from a
        # service response.
        self.name = None
        self.resource_name = None
        self.display_name = None
        self.current_value = None
        self.limit = None
        self.unit = None
        self.next_reset_time = None
class ServerVersionCapability(Model):
    """The server capability.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar name: The server version name.
    :vartype name: str
    :ivar supported_editions: The list of supported database editions.
    :vartype supported_editions:
    list[~azure.mgmt.sql.models.EditionCapability]
    :ivar supported_elastic_pool_editions: The list of supported elastic pool
    editions.
    :vartype supported_elastic_pool_editions:
    list[~azure.mgmt.sql.models.ElasticPoolEditionCapability]
    :ivar status: The status of the capability. Possible values include:
    'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: The reason for the capability not being available.
    :type reason: str
    """
    # msrest metadata: 'readonly' fields are populated only by the service.
    _validation = {
        'name': {'readonly': True},
        'supported_editions': {'readonly': True},
        'supported_elastic_pool_editions': {'readonly': True},
        'status': {'readonly': True},
    }
    # msrest metadata: Python attribute -> JSON key and wire type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'supported_editions': {'key': 'supportedEditions', 'type': '[EditionCapability]'},
        'supported_elastic_pool_editions': {'key': 'supportedElasticPoolEditions', 'type': '[ElasticPoolEditionCapability]'},
        'status': {'key': 'status', 'type': 'CapabilityStatus'},
        'reason': {'key': 'reason', 'type': 'str'},
    }
    def __init__(self, *, reason: str=None, **kwargs) -> None:
        super(ServerVersionCapability, self).__init__(**kwargs)
        # Read-only fields are left as None until deserialized from a
        # service response.
        self.name = None
        self.supported_editions = None
        self.supported_elastic_pool_editions = None
        self.status = None
        self.reason = reason
class ServerVulnerabilityAssessment(ProxyResource):
    """A server vulnerability assessment.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param storage_container_path: Required. A blob storage container path to
    hold the scan results (e.g.
    https://myStorage.blob.core.windows.net/VaScans/).
    :type storage_container_path: str
    :param storage_container_sas_key: A shared access signature (SAS Key) that
    has write access to the blob container specified in 'storageContainerPath'
    parameter. If 'storageAccountAccessKey' isn't specified,
    StorageContainerSasKey is required.
    :type storage_container_sas_key: str
    :param storage_account_access_key: Specifies the identifier key of the
    storage account for vulnerability assessment scan results. If
    'StorageContainerSasKey' isn't specified, storageAccountAccessKey is
    required.
    :type storage_account_access_key: str
    :param recurring_scans: The recurring scans settings
    :type recurring_scans:
    ~azure.mgmt.sql.models.VulnerabilityAssessmentRecurringScansProperties
    """
    # msrest metadata: 'readonly' fields come from the service; 'required'
    # fields are validated before a request is sent.
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'storage_container_path': {'required': True},
    }
    # msrest metadata: Python attribute -> JSON key path and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'storage_container_path': {'key': 'properties.storageContainerPath', 'type': 'str'},
        'storage_container_sas_key': {'key': 'properties.storageContainerSasKey', 'type': 'str'},
        'storage_account_access_key': {'key': 'properties.storageAccountAccessKey', 'type': 'str'},
        'recurring_scans': {'key': 'properties.recurringScans', 'type': 'VulnerabilityAssessmentRecurringScansProperties'},
    }
    def __init__(self, *, storage_container_path: str, storage_container_sas_key: str=None, storage_account_access_key: str=None, recurring_scans=None, **kwargs) -> None:
        super(ServerVulnerabilityAssessment, self).__init__(**kwargs)
        self.storage_container_path = storage_container_path
        self.storage_container_sas_key = storage_container_sas_key
        self.storage_account_access_key = storage_account_access_key
        self.recurring_scans = recurring_scans
class ServiceObjective(ProxyResource):
    """Represents a database service objective.
    Variables are only populated by the server, and will be ignored when
    sending a request.
    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar service_objective_name: The name for the service objective.
    :vartype service_objective_name: str
    :ivar is_default: Gets whether the service level objective is the default
    service objective.
    :vartype is_default: bool
    :ivar is_system: Gets whether the service level objective is a system
    service objective.
    :vartype is_system: bool
    :ivar description: The description for the service level objective.
    :vartype description: str
    :ivar enabled: Gets whether the service level objective is enabled.
    :vartype enabled: bool
    """
    # msrest metadata: every field is read-only (service-populated).
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'service_objective_name': {'readonly': True},
        'is_default': {'readonly': True},
        'is_system': {'readonly': True},
        'description': {'readonly': True},
        'enabled': {'readonly': True},
    }
    # msrest metadata: Python attribute -> JSON key path and wire type.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'service_objective_name': {'key': 'properties.serviceObjectiveName', 'type': 'str'},
        'is_default': {'key': 'properties.isDefault', 'type': 'bool'},
        'is_system': {'key': 'properties.isSystem', 'type': 'bool'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'enabled': {'key': 'properties.enabled', 'type': 'bool'},
    }
    def __init__(self, **kwargs) -> None:
        super(ServiceObjective, self).__init__(**kwargs)
        # All fields are read-only: left as None until deserialized from a
        # service response.
        self.service_objective_name = None
        self.is_default = None
        self.is_system = None
        self.description = None
        self.enabled = None
class ServiceObjectiveCapability(Model):
    """The service objectives capability.

    Only ``reason`` can be set by the caller; every other attribute is
    populated by the server and ignored when sending a request.

    :ivar id: The unique ID of the service objective.
    :vartype id: str
    :ivar name: The service objective name.
    :vartype name: str
    :ivar supported_max_sizes: The list of supported maximum database sizes.
    :vartype supported_max_sizes:
     list[~azure.mgmt.sql.models.MaxSizeRangeCapability]
    :ivar performance_level: The performance level.
    :vartype performance_level:
     ~azure.mgmt.sql.models.PerformanceLevelCapability
    :ivar sku: The sku.
    :vartype sku: ~azure.mgmt.sql.models.Sku
    :ivar supported_license_types: List of supported license types.
    :vartype supported_license_types:
     list[~azure.mgmt.sql.models.LicenseTypeCapability]
    :ivar included_max_size: The included (free) max size.
    :vartype included_max_size: ~azure.mgmt.sql.models.MaxSizeCapability
    :ivar status: The status of the capability. Possible values include:
     'Visible', 'Available', 'Default', 'Disabled'
    :vartype status: str or ~azure.mgmt.sql.models.CapabilityStatus
    :param reason: The reason for the capability not being available.
    :type reason: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'supported_max_sizes': {'readonly': True},
        'performance_level': {'readonly': True},
        'sku': {'readonly': True},
        'supported_license_types': {'readonly': True},
        'included_max_size': {'readonly': True},
        'status': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'supported_max_sizes': {'key': 'supportedMaxSizes', 'type': '[MaxSizeRangeCapability]'},
        'performance_level': {'key': 'performanceLevel', 'type': 'PerformanceLevelCapability'},
        'sku': {'key': 'sku', 'type': 'Sku'},
        'supported_license_types': {'key': 'supportedLicenseTypes', 'type': '[LicenseTypeCapability]'},
        'included_max_size': {'key': 'includedMaxSize', 'type': 'MaxSizeCapability'},
        'status': {'key': 'status', 'type': 'CapabilityStatus'},
        'reason': {'key': 'reason', 'type': 'str'},
    }

    def __init__(self, *, reason: str=None, **kwargs) -> None:
        """Accept only the writable ``reason``; the rest are placeholders."""
        super(ServiceObjectiveCapability, self).__init__(**kwargs)
        self.reason = reason
        # Server-populated, read-only fields.
        self.id = None
        self.name = None
        self.supported_max_sizes = None
        self.performance_level = None
        self.sku = None
        self.supported_license_types = None
        self.included_max_size = None
        self.status = None
class ServiceTierAdvisor(ProxyResource):
    """Represents a Service Tier Advisor.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar observation_period_start: The observation period start (ISO8601
     format).
    :vartype observation_period_start: datetime
    :ivar observation_period_end: The observation period end (ISO8601
     format).
    :vartype observation_period_end: datetime
    :ivar active_time_ratio: The activeTimeRatio for service tier advisor.
    :vartype active_time_ratio: float
    :ivar min_dtu: Gets or sets minDtu for service tier advisor.
    :vartype min_dtu: float
    :ivar avg_dtu: Gets or sets avgDtu for service tier advisor.
    :vartype avg_dtu: float
    :ivar max_dtu: Gets or sets maxDtu for service tier advisor.
    :vartype max_dtu: float
    :ivar max_size_in_gb: Gets or sets maxSizeInGB for service tier advisor.
    :vartype max_size_in_gb: float
    :ivar service_level_objective_usage_metrics: Gets or sets
     serviceLevelObjectiveUsageMetrics for the service tier advisor.
    :vartype service_level_objective_usage_metrics:
     list[~azure.mgmt.sql.models.SloUsageMetric]
    :ivar current_service_level_objective: Gets or sets
     currentServiceLevelObjective for service tier advisor.
    :vartype current_service_level_objective: str
    :ivar current_service_level_objective_id: Gets or sets
     currentServiceLevelObjectiveId for service tier advisor.
    :vartype current_service_level_objective_id: str
    :ivar usage_based_recommendation_service_level_objective: Gets or sets
     usageBasedRecommendationServiceLevelObjective for service tier advisor.
    :vartype usage_based_recommendation_service_level_objective: str
    :ivar usage_based_recommendation_service_level_objective_id: Gets or sets
     usageBasedRecommendationServiceLevelObjectiveId for service tier advisor.
    :vartype usage_based_recommendation_service_level_objective_id: str
    :ivar database_size_based_recommendation_service_level_objective: Gets or
     sets databaseSizeBasedRecommendationServiceLevelObjective for service tier
     advisor.
    :vartype database_size_based_recommendation_service_level_objective: str
    :ivar database_size_based_recommendation_service_level_objective_id: Gets
     or sets databaseSizeBasedRecommendationServiceLevelObjectiveId for service
     tier advisor.
    :vartype database_size_based_recommendation_service_level_objective_id:
     str
    :ivar disaster_plan_based_recommendation_service_level_objective: Gets or
     sets disasterPlanBasedRecommendationServiceLevelObjective for service tier
     advisor.
    :vartype disaster_plan_based_recommendation_service_level_objective: str
    :ivar disaster_plan_based_recommendation_service_level_objective_id: Gets
     or sets disasterPlanBasedRecommendationServiceLevelObjectiveId for service
     tier advisor.
    :vartype disaster_plan_based_recommendation_service_level_objective_id:
     str
    :ivar overall_recommendation_service_level_objective: Gets or sets
     overallRecommendationServiceLevelObjective for service tier advisor.
    :vartype overall_recommendation_service_level_objective: str
    :ivar overall_recommendation_service_level_objective_id: Gets or sets
     overallRecommendationServiceLevelObjectiveId for service tier advisor.
    :vartype overall_recommendation_service_level_objective_id: str
    :ivar confidence: Gets or sets confidence for service tier advisor.
    :vartype confidence: float
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'observation_period_start': {'readonly': True},
        'observation_period_end': {'readonly': True},
        'active_time_ratio': {'readonly': True},
        'min_dtu': {'readonly': True},
        'avg_dtu': {'readonly': True},
        'max_dtu': {'readonly': True},
        'max_size_in_gb': {'readonly': True},
        'service_level_objective_usage_metrics': {'readonly': True},
        'current_service_level_objective': {'readonly': True},
        'current_service_level_objective_id': {'readonly': True},
        'usage_based_recommendation_service_level_objective': {'readonly': True},
        'usage_based_recommendation_service_level_objective_id': {'readonly': True},
        'database_size_based_recommendation_service_level_objective': {'readonly': True},
        'database_size_based_recommendation_service_level_objective_id': {'readonly': True},
        'disaster_plan_based_recommendation_service_level_objective': {'readonly': True},
        'disaster_plan_based_recommendation_service_level_objective_id': {'readonly': True},
        'overall_recommendation_service_level_objective': {'readonly': True},
        'overall_recommendation_service_level_objective_id': {'readonly': True},
        'confidence': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'observation_period_start': {'key': 'properties.observationPeriodStart', 'type': 'iso-8601'},
        'observation_period_end': {'key': 'properties.observationPeriodEnd', 'type': 'iso-8601'},
        'active_time_ratio': {'key': 'properties.activeTimeRatio', 'type': 'float'},
        'min_dtu': {'key': 'properties.minDtu', 'type': 'float'},
        'avg_dtu': {'key': 'properties.avgDtu', 'type': 'float'},
        'max_dtu': {'key': 'properties.maxDtu', 'type': 'float'},
        'max_size_in_gb': {'key': 'properties.maxSizeInGB', 'type': 'float'},
        'service_level_objective_usage_metrics': {'key': 'properties.serviceLevelObjectiveUsageMetrics', 'type': '[SloUsageMetric]'},
        'current_service_level_objective': {'key': 'properties.currentServiceLevelObjective', 'type': 'str'},
        'current_service_level_objective_id': {'key': 'properties.currentServiceLevelObjectiveId', 'type': 'str'},
        'usage_based_recommendation_service_level_objective': {'key': 'properties.usageBasedRecommendationServiceLevelObjective', 'type': 'str'},
        'usage_based_recommendation_service_level_objective_id': {'key': 'properties.usageBasedRecommendationServiceLevelObjectiveId', 'type': 'str'},
        'database_size_based_recommendation_service_level_objective': {'key': 'properties.databaseSizeBasedRecommendationServiceLevelObjective', 'type': 'str'},
        'database_size_based_recommendation_service_level_objective_id': {'key': 'properties.databaseSizeBasedRecommendationServiceLevelObjectiveId', 'type': 'str'},
        'disaster_plan_based_recommendation_service_level_objective': {'key': 'properties.disasterPlanBasedRecommendationServiceLevelObjective', 'type': 'str'},
        'disaster_plan_based_recommendation_service_level_objective_id': {'key': 'properties.disasterPlanBasedRecommendationServiceLevelObjectiveId', 'type': 'str'},
        'overall_recommendation_service_level_objective': {'key': 'properties.overallRecommendationServiceLevelObjective', 'type': 'str'},
        'overall_recommendation_service_level_objective_id': {'key': 'properties.overallRecommendationServiceLevelObjectiveId', 'type': 'str'},
        'confidence': {'key': 'properties.confidence', 'type': 'float'},
    }

    def __init__(self, **kwargs) -> None:
        """All attributes are server-populated; accept only base kwargs."""
        super(ServiceTierAdvisor, self).__init__(**kwargs)
        # Fix applied above: the docstring previously described
        # observation_period_end as the observation period *start*
        # (copy-paste error); it is the end of the observation window.
        self.observation_period_start = None
        self.observation_period_end = None
        self.active_time_ratio = None
        self.min_dtu = None
        self.avg_dtu = None
        self.max_dtu = None
        self.max_size_in_gb = None
        self.service_level_objective_usage_metrics = None
        self.current_service_level_objective = None
        self.current_service_level_objective_id = None
        self.usage_based_recommendation_service_level_objective = None
        self.usage_based_recommendation_service_level_objective_id = None
        self.database_size_based_recommendation_service_level_objective = None
        self.database_size_based_recommendation_service_level_objective_id = None
        self.disaster_plan_based_recommendation_service_level_objective = None
        self.disaster_plan_based_recommendation_service_level_objective_id = None
        self.overall_recommendation_service_level_objective = None
        self.overall_recommendation_service_level_objective_id = None
        self.confidence = None
class Sku(Model):
    """An ARM Resource SKU.

    ``name`` is mandatory when sending the resource to Azure; the other
    fields are optional descriptors.

    :param name: Required. The name of the SKU, typically, a letter + Number
     code, e.g. P3.
    :type name: str
    :param tier: The tier or edition of the particular SKU, e.g. Basic,
     Premium.
    :type tier: str
    :param size: Size of the particular SKU
    :type size: str
    :param family: If the service has different generations of hardware, for
     the same SKU, then that can be captured here.
    :type family: str
    :param capacity: Capacity of the particular SKU.
    :type capacity: int
    """

    _validation = {
        'name': {'required': True},
    }

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'tier': {'key': 'tier', 'type': 'str'},
        'size': {'key': 'size', 'type': 'str'},
        'family': {'key': 'family', 'type': 'str'},
        'capacity': {'key': 'capacity', 'type': 'int'},
    }

    def __init__(self, *, name: str, tier: str=None, size: str=None, family: str=None, capacity: int=None, **kwargs) -> None:
        """Record the SKU descriptors verbatim on the instance."""
        super(Sku, self).__init__(**kwargs)
        self.name = name
        self.capacity = capacity
        self.family = family
        self.size = size
        self.tier = tier
class SloUsageMetric(Model):
    """A Slo Usage Metric.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar service_level_objective: The serviceLevelObjective for SLO usage
     metric. Possible values include: 'System', 'System0', 'System1',
     'System2', 'System3', 'System4', 'System2L', 'System3L', 'System4L',
     'Free', 'Basic', 'S0', 'S1', 'S2', 'S3', 'S4', 'S6', 'S7', 'S9', 'S12',
     'P1', 'P2', 'P3', 'P4', 'P6', 'P11', 'P15', 'PRS1', 'PRS2', 'PRS4',
     'PRS6', 'DW100', 'DW200', 'DW300', 'DW400', 'DW500', 'DW600', 'DW1000',
     'DW1200', 'DW1000c', 'DW1500', 'DW1500c', 'DW2000', 'DW2000c', 'DW3000',
     'DW2500c', 'DW3000c', 'DW6000', 'DW5000c', 'DW6000c', 'DW7500c',
     'DW10000c', 'DW15000c', 'DW30000c', 'DS100', 'DS200', 'DS300', 'DS400',
     'DS500', 'DS600', 'DS1000', 'DS1200', 'DS1500', 'DS2000', 'ElasticPool'
    :vartype service_level_objective: str or
     ~azure.mgmt.sql.models.ServiceObjectiveName
    :ivar service_level_objective_id: The serviceLevelObjectiveId for SLO
     usage metric.
    :vartype service_level_objective_id: str
    :ivar in_range_time_ratio: Gets or sets inRangeTimeRatio for SLO usage
     metric.
    :vartype in_range_time_ratio: float
    """

    # All three fields are read-only (see _validation) and serialized from
    # the top-level wire keys in _attribute_map.
    _validation = {
        'service_level_objective': {'readonly': True},
        'service_level_objective_id': {'readonly': True},
        'in_range_time_ratio': {'readonly': True},
    }

    _attribute_map = {
        'service_level_objective': {'key': 'serviceLevelObjective', 'type': 'str'},
        'service_level_objective_id': {'key': 'serviceLevelObjectiveId', 'type': 'str'},
        'in_range_time_ratio': {'key': 'inRangeTimeRatio', 'type': 'float'},
    }

    def __init__(self, **kwargs) -> None:
        super(SloUsageMetric, self).__init__(**kwargs)
        # Placeholders; the server fills these in during deserialization.
        self.service_level_objective = None
        self.service_level_objective_id = None
        self.in_range_time_ratio = None
class SubscriptionUsage(ProxyResource):
    """Usage Metric of a Subscription in a Location.

    Every attribute is read-only; the server populates all values and
    ignores them on outgoing requests.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar display_name: User-readable name of the metric.
    :vartype display_name: str
    :ivar current_value: Current value of the metric.
    :vartype current_value: float
    :ivar limit: Boundary value of the metric.
    :vartype limit: float
    :ivar unit: Unit of the metric.
    :vartype unit: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'display_name': {'readonly': True},
        'current_value': {'readonly': True},
        'limit': {'readonly': True},
        'unit': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'display_name': {'key': 'properties.displayName', 'type': 'str'},
        'current_value': {'key': 'properties.currentValue', 'type': 'float'},
        'limit': {'key': 'properties.limit', 'type': 'float'},
        'unit': {'key': 'properties.unit', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        """All fields are server-populated; only base-class kwargs apply."""
        super(SubscriptionUsage, self).__init__(**kwargs)
        # Deserialization overwrites these placeholders.
        self.unit = None
        self.limit = None
        self.current_value = None
        self.display_name = None
class SyncAgent(ProxyResource):
    """An Azure SQL Database sync agent.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar sync_agent_name: Name of the sync agent.
    :vartype sync_agent_name: str
    :param sync_database_id: ARM resource id of the sync database in the sync
     agent.
    :type sync_database_id: str
    :ivar last_alive_time: Last alive time of the sync agent.
    :vartype last_alive_time: datetime
    :ivar state: State of the sync agent. Possible values include: 'Online',
     'Offline', 'NeverConnected'
    :vartype state: str or ~azure.mgmt.sql.models.SyncAgentState
    :ivar is_up_to_date: If the sync agent version is up to date.
    :vartype is_up_to_date: bool
    :ivar expiry_time: Expiration time of the sync agent version.
    :vartype expiry_time: datetime
    :ivar version: Version of the sync agent.
    :vartype version: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'sync_agent_name': {'readonly': True},
        'last_alive_time': {'readonly': True},
        'state': {'readonly': True},
        'is_up_to_date': {'readonly': True},
        'expiry_time': {'readonly': True},
        'version': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        # Note: the wire key for sync_agent_name is 'properties.name',
        # distinct from the top-level resource 'name'.
        'sync_agent_name': {'key': 'properties.name', 'type': 'str'},
        'sync_database_id': {'key': 'properties.syncDatabaseId', 'type': 'str'},
        'last_alive_time': {'key': 'properties.lastAliveTime', 'type': 'iso-8601'},
        'state': {'key': 'properties.state', 'type': 'str'},
        'is_up_to_date': {'key': 'properties.isUpToDate', 'type': 'bool'},
        'expiry_time': {'key': 'properties.expiryTime', 'type': 'iso-8601'},
        'version': {'key': 'properties.version', 'type': 'str'},
    }

    def __init__(self, *, sync_database_id: str=None, **kwargs) -> None:
        super(SyncAgent, self).__init__(**kwargs)
        # sync_database_id is the only caller-settable property; the rest
        # are read-only and filled in during deserialization.
        self.sync_agent_name = None
        self.sync_database_id = sync_database_id
        self.last_alive_time = None
        self.state = None
        self.is_up_to_date = None
        self.expiry_time = None
        self.version = None
class SyncAgentKeyProperties(Model):
    """Properties of an Azure SQL Database sync agent key.

    The single attribute is read-only: the server populates it and ignores
    it on outgoing requests.

    :ivar sync_agent_key: Key of sync agent.
    :vartype sync_agent_key: str
    """

    _validation = {
        'sync_agent_key': {'readonly': True},
    }

    _attribute_map = {
        'sync_agent_key': {'key': 'syncAgentKey', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        """The key is server-populated; only base-class kwargs apply."""
        super(SyncAgentKeyProperties, self).__init__(**kwargs)
        self.sync_agent_key = None
class SyncAgentLinkedDatabase(ProxyResource):
    """An Azure SQL Database sync agent linked database.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar database_type: Type of the sync agent linked database. Possible
     values include: 'AzureSqlDatabase', 'SqlServerDatabase'
    :vartype database_type: str or ~azure.mgmt.sql.models.SyncMemberDbType
    :ivar database_id: Id of the sync agent linked database.
    :vartype database_id: str
    :ivar description: Description of the sync agent linked database.
    :vartype description: str
    :ivar server_name: Server name of the sync agent linked database.
    :vartype server_name: str
    :ivar database_name: Database name of the sync agent linked database.
    :vartype database_name: str
    :ivar user_name: User name of the sync agent linked database.
    :vartype user_name: str
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'database_type': {'readonly': True},
        'database_id': {'readonly': True},
        'description': {'readonly': True},
        'server_name': {'readonly': True},
        'database_name': {'readonly': True},
        'user_name': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'database_type': {'key': 'properties.databaseType', 'type': 'str'},
        'database_id': {'key': 'properties.databaseId', 'type': 'str'},
        'description': {'key': 'properties.description', 'type': 'str'},
        'server_name': {'key': 'properties.serverName', 'type': 'str'},
        'database_name': {'key': 'properties.databaseName', 'type': 'str'},
        'user_name': {'key': 'properties.userName', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super(SyncAgentLinkedDatabase, self).__init__(**kwargs)
        # All properties are read-only; deserialization fills them in.
        self.database_type = None
        self.database_id = None
        self.description = None
        self.server_name = None
        self.database_name = None
        self.user_name = None
class SyncDatabaseIdProperties(Model):
    """Properties of the sync database id.

    The single attribute is read-only: the server populates it and ignores
    it on outgoing requests.

    :ivar id: ARM resource id of sync database.
    :vartype id: str
    """

    _validation = {
        'id': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        """The id is server-populated; only base-class kwargs apply."""
        super(SyncDatabaseIdProperties, self).__init__(**kwargs)
        self.id = None
class SyncFullSchemaProperties(Model):
    """Properties of the database full schema.

    Both attributes are read-only: the server populates them and ignores
    them on outgoing requests.

    :ivar tables: List of tables in the database full schema.
    :vartype tables: list[~azure.mgmt.sql.models.SyncFullSchemaTable]
    :ivar last_update_time: Last update time of the database schema.
    :vartype last_update_time: datetime
    """

    _validation = {
        'tables': {'readonly': True},
        'last_update_time': {'readonly': True},
    }

    _attribute_map = {
        'tables': {'key': 'tables', 'type': '[SyncFullSchemaTable]'},
        'last_update_time': {'key': 'lastUpdateTime', 'type': 'iso-8601'},
    }

    def __init__(self, **kwargs) -> None:
        """All fields are server-populated; only base-class kwargs apply."""
        super(SyncFullSchemaProperties, self).__init__(**kwargs)
        self.last_update_time = None
        self.tables = None
class SyncFullSchemaTable(Model):
    """Properties of the table in the database full schema.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar columns: List of columns in the table of database full schema.
    :vartype columns: list[~azure.mgmt.sql.models.SyncFullSchemaTableColumn]
    :ivar error_id: Error id of the table.
    :vartype error_id: str
    :ivar has_error: If there is error in the table.
    :vartype has_error: bool
    :ivar name: Name of the table.
    :vartype name: str
    :ivar quoted_name: Quoted name of the table.
    :vartype quoted_name: str
    """

    _validation = {
        'columns': {'readonly': True},
        'error_id': {'readonly': True},
        'has_error': {'readonly': True},
        'name': {'readonly': True},
        'quoted_name': {'readonly': True},
    }

    _attribute_map = {
        'columns': {'key': 'columns', 'type': '[SyncFullSchemaTableColumn]'},
        'error_id': {'key': 'errorId', 'type': 'str'},
        'has_error': {'key': 'hasError', 'type': 'bool'},
        'name': {'key': 'name', 'type': 'str'},
        'quoted_name': {'key': 'quotedName', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super(SyncFullSchemaTable, self).__init__(**kwargs)
        # All properties are read-only; deserialization fills them in.
        self.columns = None
        self.error_id = None
        self.has_error = None
        self.name = None
        self.quoted_name = None
class SyncFullSchemaTableColumn(Model):
    """Properties of the column in the table of database full schema.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar data_size: Data size of the column.
    :vartype data_size: str
    :ivar data_type: Data type of the column.
    :vartype data_type: str
    :ivar error_id: Error id of the column.
    :vartype error_id: str
    :ivar has_error: If there is error in the table.
    :vartype has_error: bool
    :ivar is_primary_key: If it is the primary key of the table.
    :vartype is_primary_key: bool
    :ivar name: Name of the column.
    :vartype name: str
    :ivar quoted_name: Quoted name of the column.
    :vartype quoted_name: str
    """

    _validation = {
        'data_size': {'readonly': True},
        'data_type': {'readonly': True},
        'error_id': {'readonly': True},
        'has_error': {'readonly': True},
        'is_primary_key': {'readonly': True},
        'name': {'readonly': True},
        'quoted_name': {'readonly': True},
    }

    _attribute_map = {
        'data_size': {'key': 'dataSize', 'type': 'str'},
        'data_type': {'key': 'dataType', 'type': 'str'},
        'error_id': {'key': 'errorId', 'type': 'str'},
        'has_error': {'key': 'hasError', 'type': 'bool'},
        'is_primary_key': {'key': 'isPrimaryKey', 'type': 'bool'},
        'name': {'key': 'name', 'type': 'str'},
        'quoted_name': {'key': 'quotedName', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super(SyncFullSchemaTableColumn, self).__init__(**kwargs)
        # All properties are read-only; deserialization fills them in.
        self.data_size = None
        self.data_type = None
        self.error_id = None
        self.has_error = None
        self.is_primary_key = None
        self.name = None
        self.quoted_name = None
class SyncGroup(ProxyResource):
    """An Azure SQL Database sync group.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param interval: Sync interval of the sync group.
    :type interval: int
    :ivar last_sync_time: Last sync time of the sync group.
    :vartype last_sync_time: datetime
    :param conflict_resolution_policy: Conflict resolution policy of the sync
     group. Possible values include: 'HubWin', 'MemberWin'
    :type conflict_resolution_policy: str or
     ~azure.mgmt.sql.models.SyncConflictResolutionPolicy
    :param sync_database_id: ARM resource id of the sync database in the sync
     group.
    :type sync_database_id: str
    :param hub_database_user_name: User name for the sync group hub database
     credential.
    :type hub_database_user_name: str
    :param hub_database_password: Password for the sync group hub database
     credential.
    :type hub_database_password: str
    :ivar sync_state: Sync state of the sync group. Possible values include:
     'NotReady', 'Error', 'Warning', 'Progressing', 'Good'
    :vartype sync_state: str or ~azure.mgmt.sql.models.SyncGroupState
    :param schema: Sync schema of the sync group.
    :type schema: ~azure.mgmt.sql.models.SyncGroupSchema
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'last_sync_time': {'readonly': True},
        'sync_state': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'interval': {'key': 'properties.interval', 'type': 'int'},
        'last_sync_time': {'key': 'properties.lastSyncTime', 'type': 'iso-8601'},
        'conflict_resolution_policy': {'key': 'properties.conflictResolutionPolicy', 'type': 'str'},
        'sync_database_id': {'key': 'properties.syncDatabaseId', 'type': 'str'},
        'hub_database_user_name': {'key': 'properties.hubDatabaseUserName', 'type': 'str'},
        'hub_database_password': {'key': 'properties.hubDatabasePassword', 'type': 'str'},
        'sync_state': {'key': 'properties.syncState', 'type': 'str'},
        'schema': {'key': 'properties.schema', 'type': 'SyncGroupSchema'},
    }

    def __init__(self, *, interval: int=None, conflict_resolution_policy=None, sync_database_id: str=None, hub_database_user_name: str=None, hub_database_password: str=None, schema=None, **kwargs) -> None:
        super(SyncGroup, self).__init__(**kwargs)
        self.interval = interval
        # last_sync_time and sync_state are read-only (see _validation);
        # the server populates them during deserialization.
        self.last_sync_time = None
        self.conflict_resolution_policy = conflict_resolution_policy
        self.sync_database_id = sync_database_id
        self.hub_database_user_name = hub_database_user_name
        self.hub_database_password = hub_database_password
        self.sync_state = None
        self.schema = schema
class SyncGroupLogProperties(Model):
    """Properties of an Azure SQL Database sync group log.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar timestamp: Timestamp of the sync group log.
    :vartype timestamp: datetime
    :ivar type: Type of the sync group log. Possible values include: 'All',
     'Error', 'Warning', 'Success'
    :vartype type: str or ~azure.mgmt.sql.models.SyncGroupLogType
    :ivar source: Source of the sync group log.
    :vartype source: str
    :ivar details: Details of the sync group log.
    :vartype details: str
    :ivar tracing_id: TracingId of the sync group log.
    :vartype tracing_id: str
    :ivar operation_status: OperationStatus of the sync group log.
    :vartype operation_status: str
    """

    _validation = {
        'timestamp': {'readonly': True},
        'type': {'readonly': True},
        'source': {'readonly': True},
        'details': {'readonly': True},
        'tracing_id': {'readonly': True},
        'operation_status': {'readonly': True},
    }

    _attribute_map = {
        'timestamp': {'key': 'timestamp', 'type': 'iso-8601'},
        'type': {'key': 'type', 'type': 'str'},
        'source': {'key': 'source', 'type': 'str'},
        'details': {'key': 'details', 'type': 'str'},
        'tracing_id': {'key': 'tracingId', 'type': 'str'},
        'operation_status': {'key': 'operationStatus', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super(SyncGroupLogProperties, self).__init__(**kwargs)
        # All properties are read-only; deserialization fills them in.
        self.timestamp = None
        self.type = None
        self.source = None
        self.details = None
        self.tracing_id = None
        self.operation_status = None
class SyncGroupSchema(Model):
    """Properties of sync group schema.

    Both attributes are optional and caller-settable.

    :param tables: List of tables in sync group schema.
    :type tables: list[~azure.mgmt.sql.models.SyncGroupSchemaTable]
    :param master_sync_member_name: Name of master sync member where the
     schema is from.
    :type master_sync_member_name: str
    """

    _attribute_map = {
        'tables': {'key': 'tables', 'type': '[SyncGroupSchemaTable]'},
        'master_sync_member_name': {'key': 'masterSyncMemberName', 'type': 'str'},
    }

    def __init__(self, *, tables=None, master_sync_member_name: str=None, **kwargs) -> None:
        """Record the schema tables and master member name verbatim."""
        super(SyncGroupSchema, self).__init__(**kwargs)
        self.master_sync_member_name = master_sync_member_name
        self.tables = tables
class SyncGroupSchemaTable(Model):
    """Properties of table in sync group schema.

    Both attributes are optional and caller-settable.

    :param columns: List of columns in sync group schema.
    :type columns: list[~azure.mgmt.sql.models.SyncGroupSchemaTableColumn]
    :param quoted_name: Quoted name of sync group schema table.
    :type quoted_name: str
    """

    _attribute_map = {
        'columns': {'key': 'columns', 'type': '[SyncGroupSchemaTableColumn]'},
        'quoted_name': {'key': 'quotedName', 'type': 'str'},
    }

    def __init__(self, *, columns=None, quoted_name: str=None, **kwargs) -> None:
        """Record the table columns and quoted name verbatim."""
        super(SyncGroupSchemaTable, self).__init__(**kwargs)
        self.quoted_name = quoted_name
        self.columns = columns
class SyncGroupSchemaTableColumn(Model):
    """Properties of column in sync group table.

    All attributes are optional and caller-settable.

    :param quoted_name: Quoted name of sync group table column.
    :type quoted_name: str
    :param data_size: Data size of the column.
    :type data_size: str
    :param data_type: Data type of the column.
    :type data_type: str
    """

    _attribute_map = {
        'quoted_name': {'key': 'quotedName', 'type': 'str'},
        'data_size': {'key': 'dataSize', 'type': 'str'},
        'data_type': {'key': 'dataType', 'type': 'str'},
    }

    def __init__(self, *, quoted_name: str=None, data_size: str=None, data_type: str=None, **kwargs) -> None:
        """Record the column descriptors verbatim."""
        super(SyncGroupSchemaTableColumn, self).__init__(**kwargs)
        self.data_type = data_type
        self.data_size = data_size
        self.quoted_name = quoted_name
class SyncMember(ProxyResource):
"""An Azure SQL Database sync member.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param database_type: Database type of the sync member. Possible values
include: 'AzureSqlDatabase', 'SqlServerDatabase'
:type database_type: str or ~azure.mgmt.sql.models.SyncMemberDbType
:param sync_agent_id: ARM resource id of the sync agent in the sync
member.
:type sync_agent_id: str
:param sql_server_database_id: SQL Server database id of the sync member.
:type sql_server_database_id: str
:param server_name: Server name of the member database in the sync member
:type server_name: str
:param database_name: Database name of the member database in the sync
member.
:type database_name: str
:param user_name: User name of the member database in the sync member.
:type user_name: str
:param password: Password of the member database in the sync member.
:type password: str
:param sync_direction: Sync direction of the sync member. Possible values
include: 'Bidirectional', 'OneWayMemberToHub', 'OneWayHubToMember'
:type sync_direction: str or ~azure.mgmt.sql.models.SyncDirection
:ivar sync_state: Sync state of the sync member. Possible values include:
'SyncInProgress', 'SyncSucceeded', 'SyncFailed',
'DisabledTombstoneCleanup', 'DisabledBackupRestore',
'SyncSucceededWithWarnings', 'SyncCancelling', 'SyncCancelled',
'UnProvisioned', 'Provisioning', 'Provisioned', 'ProvisionFailed',
'DeProvisioning', 'DeProvisioned', 'DeProvisionFailed', 'Reprovisioning',
'ReprovisionFailed', 'UnReprovisioned'
:vartype sync_state: str or ~azure.mgmt.sql.models.SyncMemberState
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'sync_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'database_type': {'key': 'properties.databaseType', 'type': 'str'},
'sync_agent_id': {'key': 'properties.syncAgentId', 'type': 'str'},
'sql_server_database_id': {'key': 'properties.sqlServerDatabaseId', 'type': 'str'},
'server_name': {'key': 'properties.serverName', 'type': 'str'},
'database_name': {'key': 'properties.databaseName', 'type': 'str'},
'user_name': {'key': 'properties.userName', 'type': 'str'},
'password': {'key': 'properties.password', 'type': 'str'},
'sync_direction': {'key': 'properties.syncDirection', 'type': 'str'},
'sync_state': {'key': 'properties.syncState', 'type': 'str'},
}
    def __init__(self, *, database_type=None, sync_agent_id: str=None, sql_server_database_id: str=None, server_name: str=None, database_name: str=None, user_name: str=None, password: str=None, sync_direction=None, **kwargs) -> None:
        """All parameters are optional keyword-only settings of the sync
        member; ``sync_state`` is read-only (server-populated) and is
        therefore always initialized to ``None``."""
        super(SyncMember, self).__init__(**kwargs)
        self.database_type = database_type
        self.sync_agent_id = sync_agent_id
        self.sql_server_database_id = sql_server_database_id
        self.server_name = server_name
        self.database_name = database_name
        self.user_name = user_name
        self.password = password
        self.sync_direction = sync_direction
        # read-only: populated by the service on deserialization
        self.sync_state = None
class TdeCertificate(ProxyResource):
    """A TDE (transparent data encryption) certificate that can be uploaded
    into a server.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param private_blob: Required. The base64 encoded certificate private
     blob.
    :type private_blob: str
    :param cert_password: The certificate password.
    :type cert_password: str
    """

    # server-populated identity fields are read-only; the blob is mandatory
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'private_blob': {'required': True},
    }

    # python attribute -> (wire-format JSON path, msrest type)
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'private_blob': {'key': 'properties.privateBlob', 'type': 'str'},
        'cert_password': {'key': 'properties.certPassword', 'type': 'str'},
    }

    def __init__(self, *, private_blob: str, cert_password: str = None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.cert_password = cert_password
        self.private_blob = private_blob
class TransparentDataEncryption(ProxyResource):
    """A database's transparent data encryption configuration.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :param status: The status of the database transparent data encryption.
     Possible values include: 'Enabled', 'Disabled'
    :type status: str or
     ~azure.mgmt.sql.models.TransparentDataEncryptionStatus
    """

    # identity and location are server-populated, hence read-only
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'readonly': True},
    }

    # python attribute -> (wire-format JSON path, msrest type)
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'TransparentDataEncryptionStatus'},
    }

    def __init__(self, *, status=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.status = status
        # read-only: filled in by the service
        self.location = None
class TransparentDataEncryptionActivity(ProxyResource):
    """A transparent data encryption scan of a database.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar location: Resource location.
    :vartype location: str
    :ivar status: The status of the database. Possible values include:
     'Encrypting', 'Decrypting'
    :vartype status: str or
     ~azure.mgmt.sql.models.TransparentDataEncryptionActivityStatus
    :ivar percent_complete: The percent complete of the transparent data
     encryption scan for a database.
    :vartype percent_complete: float
    """

    # every field is server-populated and therefore read-only
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'readonly': True},
        'status': {'readonly': True},
        'percent_complete': {'readonly': True},
    }

    # python attribute -> (wire-format JSON path, msrest type)
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'status': {'key': 'properties.status', 'type': 'str'},
        'percent_complete': {'key': 'properties.percentComplete', 'type': 'float'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        # all values are filled in by the service on deserialization
        self.percent_complete = None
        self.status = None
        self.location = None
class Usage(Model):
    """ARM usage.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: ~azure.mgmt.sql.models.Name
    :ivar type: Resource type.
    :vartype type: str
    :ivar unit: Usage unit.
    :vartype unit: str
    :ivar current_value: Usage current value.
    :vartype current_value: int
    :ivar limit: Usage limit.
    :vartype limit: int
    :ivar requested_limit: Usage requested limit.
    :vartype requested_limit: int
    """

    # every attribute is server-populated, hence marked read-only
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'unit': {'readonly': True},
        'current_value': {'readonly': True},
        'limit': {'readonly': True},
        'requested_limit': {'readonly': True},
    }

    # python attribute -> (wire-format JSON key, msrest (de)serialization type)
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'Name'},
        'type': {'key': 'type', 'type': 'str'},
        'unit': {'key': 'unit', 'type': 'str'},
        'current_value': {'key': 'currentValue', 'type': 'int'},
        'limit': {'key': 'limit', 'type': 'int'},
        'requested_limit': {'key': 'requestedLimit', 'type': 'int'},
    }

    def __init__(self, **kwargs) -> None:
        super(Usage, self).__init__(**kwargs)
        # read-only values; filled in by the server during deserialization
        self.id = None
        self.name = None
        self.type = None
        self.unit = None
        self.current_value = None
        self.limit = None
        self.requested_limit = None
class VirtualCluster(TrackedResource):
    """An Azure SQL virtual cluster.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: Required. Resource location.
    :type location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :ivar subnet_id: Subnet resource ID for the virtual cluster.
    :vartype subnet_id: str
    :param family: If the service has different generations of hardware, for
     the same SKU, then that can be captured here.
    :type family: str
    :ivar child_resources: List of resources in this virtual cluster.
    :vartype child_resources: list[str]
    """

    # only `location` is caller-provided and mandatory; the remaining
    # validated fields are server-populated and read-only
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
        'subnet_id': {'readonly': True},
        'child_resources': {'readonly': True},
    }

    # python attribute -> (wire-format JSON key, msrest (de)serialization type)
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'subnet_id': {'key': 'properties.subnetId', 'type': 'str'},
        'family': {'key': 'properties.family', 'type': 'str'},
        'child_resources': {'key': 'properties.childResources', 'type': '[str]'},
    }

    def __init__(self, *, location: str, tags=None, family: str=None, **kwargs) -> None:
        super(VirtualCluster, self).__init__(location=location, tags=tags, **kwargs)
        # subnet_id and child_resources are read-only; the server fills them in
        self.subnet_id = None
        self.family = family
        self.child_resources = None
class VirtualClusterUpdate(Model):
    """Payload for updating an Azure SQL Database virtual cluster.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar subnet_id: Subnet resource ID for the virtual cluster.
    :vartype subnet_id: str
    :param family: If the service has different generations of hardware, for
     the same SKU, then that can be captured here.
    :type family: str
    :ivar child_resources: List of resources in this virtual cluster.
    :vartype child_resources: list[str]
    :param tags: Resource tags.
    :type tags: dict[str, str]
    """

    # subnet and child resources are server-populated, hence read-only
    _validation = {
        'subnet_id': {'readonly': True},
        'child_resources': {'readonly': True},
    }

    # python attribute -> (wire-format JSON path, msrest type)
    _attribute_map = {
        'subnet_id': {'key': 'properties.subnetId', 'type': 'str'},
        'family': {'key': 'properties.family', 'type': 'str'},
        'child_resources': {'key': 'properties.childResources', 'type': '[str]'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(self, *, family: str = None, tags=None, **kwargs) -> None:
        super().__init__(**kwargs)
        # caller-settable fields
        self.family = family
        self.tags = tags
        # read-only fields, populated by the service
        self.subnet_id = None
        self.child_resources = None
class VirtualNetworkRule(ProxyResource):
    """A virtual network rule.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    All required parameters must be populated in order to send to Azure.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param virtual_network_subnet_id: Required. The ARM resource id of the
     virtual network subnet.
    :type virtual_network_subnet_id: str
    :param ignore_missing_vnet_service_endpoint: Create firewall rule before
     the virtual network has vnet service endpoint enabled.
    :type ignore_missing_vnet_service_endpoint: bool
    :ivar state: Virtual Network Rule State. Possible values include:
     'Initializing', 'InProgress', 'Ready', 'Deleting', 'Unknown'
    :vartype state: str or ~azure.mgmt.sql.models.VirtualNetworkRuleState
    """

    # the subnet id is the only caller-required field; identity and state
    # are server-populated and read-only
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'virtual_network_subnet_id': {'required': True},
        'state': {'readonly': True},
    }

    # python attribute -> (wire-format JSON key, msrest (de)serialization type)
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'virtual_network_subnet_id': {'key': 'properties.virtualNetworkSubnetId', 'type': 'str'},
        'ignore_missing_vnet_service_endpoint': {'key': 'properties.ignoreMissingVnetServiceEndpoint', 'type': 'bool'},
        'state': {'key': 'properties.state', 'type': 'str'},
    }

    def __init__(self, *, virtual_network_subnet_id: str, ignore_missing_vnet_service_endpoint: bool=None, **kwargs) -> None:
        super(VirtualNetworkRule, self).__init__(**kwargs)
        self.virtual_network_subnet_id = virtual_network_subnet_id
        self.ignore_missing_vnet_service_endpoint = ignore_missing_vnet_service_endpoint
        # read-only: populated by the service on deserialization
        self.state = None
class VulnerabilityAssessmentRecurringScansProperties(Model):
    """Settings of the recurring scans of a Vulnerability Assessment.

    :param is_enabled: Recurring scans state.
    :type is_enabled: bool
    :param email_subscription_admins: Specifies that the schedule scan
     notification will be sent to the subscription administrators. Default
     value: True .
    :type email_subscription_admins: bool
    :param emails: Specifies an array of e-mail addresses to which the scan
     notification is sent.
    :type emails: list[str]
    """

    # python attribute -> (wire-format JSON key, msrest type)
    _attribute_map = {
        'is_enabled': {'key': 'isEnabled', 'type': 'bool'},
        'email_subscription_admins': {'key': 'emailSubscriptionAdmins', 'type': 'bool'},
        'emails': {'key': 'emails', 'type': '[str]'},
    }

    def __init__(self, *, is_enabled: bool = None, email_subscription_admins: bool = True, emails=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.emails = emails
        self.email_subscription_admins = email_subscription_admins
        self.is_enabled = is_enabled
class VulnerabilityAssessmentScanError(Model):
    """An error reported by a vulnerability assessment scan.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar code: The error code.
    :vartype code: str
    :ivar message: The error message.
    :vartype message: str
    """

    # both fields come from the service and are read-only
    _validation = {
        'code': {'readonly': True},
        'message': {'readonly': True},
    }

    # python attribute -> (wire-format JSON key, msrest type)
    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
    }

    def __init__(self, **kwargs) -> None:
        super().__init__(**kwargs)
        self.message = None
        self.code = None
class VulnerabilityAssessmentScanRecord(ProxyResource):
    """A vulnerability assessment scan record.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar id: Resource ID.
    :vartype id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :ivar scan_id: The scan ID.
    :vartype scan_id: str
    :ivar trigger_type: The scan trigger type. Possible values include:
     'OnDemand', 'Recurring'
    :vartype trigger_type: str or
     ~azure.mgmt.sql.models.VulnerabilityAssessmentScanTriggerType
    :ivar state: The scan status. Possible values include: 'Passed', 'Failed',
     'FailedToRun', 'InProgress'
    :vartype state: str or
     ~azure.mgmt.sql.models.VulnerabilityAssessmentScanState
    :ivar start_time: The scan start time (UTC).
    :vartype start_time: datetime
    :ivar end_time: The scan end time (UTC).
    :vartype end_time: datetime
    :ivar errors: The scan errors.
    :vartype errors:
     list[~azure.mgmt.sql.models.VulnerabilityAssessmentScanError]
    :ivar storage_container_path: The scan results storage container path.
    :vartype storage_container_path: str
    :ivar number_of_failed_security_checks: The number of failed security
     checks.
    :vartype number_of_failed_security_checks: int
    """

    # every field is server-populated and therefore read-only
    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'scan_id': {'readonly': True},
        'trigger_type': {'readonly': True},
        'state': {'readonly': True},
        'start_time': {'readonly': True},
        'end_time': {'readonly': True},
        'errors': {'readonly': True},
        'storage_container_path': {'readonly': True},
        'number_of_failed_security_checks': {'readonly': True},
    }

    # python attribute -> (wire-format JSON key, msrest (de)serialization type)
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'scan_id': {'key': 'properties.scanId', 'type': 'str'},
        'trigger_type': {'key': 'properties.triggerType', 'type': 'str'},
        'state': {'key': 'properties.state', 'type': 'str'},
        'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
        'end_time': {'key': 'properties.endTime', 'type': 'iso-8601'},
        'errors': {'key': 'properties.errors', 'type': '[VulnerabilityAssessmentScanError]'},
        'storage_container_path': {'key': 'properties.storageContainerPath', 'type': 'str'},
        'number_of_failed_security_checks': {'key': 'properties.numberOfFailedSecurityChecks', 'type': 'int'},
    }

    def __init__(self, **kwargs) -> None:
        super(VulnerabilityAssessmentScanRecord, self).__init__(**kwargs)
        # read-only values; filled in by the server during deserialization
        self.scan_id = None
        self.trigger_type = None
        self.state = None
        self.start_time = None
        self.end_time = None
        self.errors = None
        self.storage_container_path = None
        self.number_of_failed_security_checks = None
| [
"zikalino@microsoft.com"
] | zikalino@microsoft.com |
99568bf21ddb40ee0e1fa9bf07ed777ed5918f52 | 864619c0245254e01fc61ffb3e9942436b0f9a13 | /cerebtests/capabilities/__init__.py | 47a740e51f2dc5132c32ad8988051153a025b2a3 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | cerebunit/cerebtests | 094e65dcbc61027b183f28de73ce48e6f6d57ec7 | cf4b6d898f391db4f6200a8ee32753d0ff3ab200 | refs/heads/master | 2022-07-31T03:09:54.759945 | 2022-07-17T11:10:58 | 2022-07-17T11:10:58 | 139,698,304 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 145 | py | # ~/cerebtests/cerebtests/capabilities/__init__.py
#from cerebtests.capabilities import cells
#from . import microcircuit
#from . import network
| [
"neuralgraphs@gmail.com"
] | neuralgraphs@gmail.com |
844d27ce562a28d26aab0686cf5d711c564a6f94 | 4015291afebfd346da3fee4b1d5a775882b5b461 | /services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_core.py | 6d5e133cdf76767f7aa5f336c39ea1574dfb6324 | [
"MIT"
] | permissive | pcrespov/osparc-simcore | 3a8a6b5252038542f515c7e90d983ac6f1fb4de7 | eb5e00bc2cf4acfe81f5dc422a5e50a4646c9596 | refs/heads/master | 2023-08-06T04:33:38.594066 | 2023-07-12T09:47:00 | 2023-07-12T09:47:00 | 130,357,545 | 0 | 1 | MIT | 2023-04-18T08:04:27 | 2018-04-20T12:10:41 | Python | UTF-8 | Python | false | false | 19,513 | py | import json
import logging
import warnings
from collections.abc import Mapping
from typing import Any, Final
import aiodocker
from aiodocker.utils import clean_filters, clean_map
from fastapi.encoders import jsonable_encoder
from models_library.aiodocker_api import AioDockerServiceSpec
from models_library.docker import to_simcore_runtime_docker_label_key
from models_library.projects import ProjectID
from models_library.projects_nodes_io import NodeID
from servicelib.json_serialization import json_dumps
from servicelib.utils import logged_gather
from starlette import status
from tenacity import TryAgain, retry
from tenacity._asyncio import AsyncRetrying
from tenacity.retry import retry_if_exception_type
from tenacity.stop import stop_after_delay
from tenacity.wait import wait_exponential, wait_random_exponential
from ....core.settings import DynamicSidecarSettings
from ....models.schemas.constants import (
DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL,
DYNAMIC_SIDECAR_SERVICE_PREFIX,
)
from ....models.schemas.dynamic_services import SchedulerData, ServiceId, ServiceState
from ....models.schemas.dynamic_services.scheduler import NetworkId
from ....utils.dict_utils import get_leaf_key_paths, nested_update
from ..docker_states import TASK_STATES_RUNNING, extract_task_state
from ..errors import DockerServiceNotFoundError, DynamicSidecarError, GenericDockerError
from ._utils import docker_client
# Per the constant's name: service states that a transient "pending" state
# should not overwrite once reached (usage is outside this module — confirm
# at call sites).
NO_PENDING_OVERWRITE = {
    ServiceState.FAILED,
    ServiceState.COMPLETE,
    ServiceState.RUNNING,
}

# module-level logger
log = logging.getLogger(__name__)
async def get_swarm_network(dynamic_sidecar_settings: DynamicSidecarSettings) -> dict:
    """Return the single swarm network dynamic services attach to.

    The network is searched by name among swarm-scoped networks:
    ``SIMCORE_SERVICES_NETWORK_NAME`` when configured, otherwise any
    network whose name contains ``_default`` (usually ``STACKNAME_default``).

    :raises DynamicSidecarError: when not exactly one network matches
    """
    async with docker_client() as client:
        all_networks = await client.networks.list()

    network_name = "_default"
    if dynamic_sidecar_settings.SIMCORE_SERVICES_NETWORK_NAME:
        network_name = dynamic_sidecar_settings.SIMCORE_SERVICES_NETWORK_NAME

    # try to find the network name (usually named STACKNAME_default)
    networks: list[dict] = [
        x for x in all_networks if "swarm" in x["Scope"] and network_name in x["Name"]
    ]
    if not networks or len(networks) > 1:
        # fix: the original message was missing the space after the period
        msg = (
            f"Swarm network name (searching for '*{network_name}*') is not configured. "
            f"Found following networks: {networks}"
        )
        raise DynamicSidecarError(msg)
    return networks[0]
async def create_network(network_config: dict[str, Any]) -> NetworkId:
    """Create a docker network and return its ID.

    If a network with the same name already exists (e.g. a leftover from a
    previous run, or a concurrent creation), its ID is recovered and
    returned instead of failing.

    :raises DynamicSidecarError: when the network can neither be created
        nor recovered by name
    """
    async with docker_client() as client:
        try:
            docker_network = await client.networks.create(network_config)
            docker_network_id: NetworkId = docker_network.id
            return docker_network_id
        except aiodocker.exceptions.DockerError as e:
            network_name = network_config["Name"]
            # make sure the error being trapped is "network does not exist";
            # any other docker error must propagate
            if f"network with name {network_name} already exists" not in str(e):
                raise
            # Fetch network ID by name if the network already exists.
            # The environment is trashed because there seems to be an issue
            # when stopping previous services.
            # It is not possible to immediately remove the network after
            # a docker-compose down involving an external overlay network
            # has removed a container; it results as already attached
            for network_details in await client.networks.list():
                if network_name == network_details["Name"]:
                    network_id: NetworkId = network_details["Id"]
                    return network_id
            # finally raise an error if a network cannot be spawned
            # pylint: disable=raise-missing-from
            msg = f"Could not create or recover a network ID for {network_config}"
            raise DynamicSidecarError(msg) from e
async def create_service_and_get_id(
    create_service_data: AioDockerServiceSpec | dict[str, Any]
) -> ServiceId:
    """Create a docker swarm service from the given spec and return its ID.

    :raises DynamicSidecarError: if docker's reply does not carry an "ID"
    """
    # NOTE: ideally the argument should always be AioDockerServiceSpec
    # but for that we need get_dynamic_proxy_spec to return that type
    async with docker_client() as client:
        kwargs = jsonable_encoder(
            create_service_data, by_alias=True, exclude_unset=True
        )
        # consistency fix: use the module logger (was the root `logging` module)
        log.debug("Creating service with\n%s", json.dumps(kwargs, indent=1))
        service_start_result = await client.services.create(**kwargs)

        log.debug(
            "Started service %s with\n%s",
            service_start_result,
            json.dumps(kwargs, indent=1),
        )

        if "ID" not in service_start_result:
            msg = f"Error while starting service: {service_start_result!s}"
            raise DynamicSidecarError(msg)
        service_id: ServiceId = service_start_result["ID"]
        return service_id
async def get_dynamic_sidecars_to_observe(
    dynamic_sidecar_settings: DynamicSidecarSettings,
) -> list[SchedulerData]:
    """Discover the running dynamic-sidecar services of this swarm stack.

    Invoked when the scheduler starts, to pick up services to observe.
    """
    async with docker_client() as client:
        sidecar_services = await _list_docker_services(
            client,
            node_id=None,
            swarm_stack_name=dynamic_sidecar_settings.SWARM_STACK_NAME,
            return_only_sidecars=True,
        )
    return [
        SchedulerData.from_service_inspect(service_inspect)
        for service_inspect in sidecar_services
    ]
async def _get_service_latest_task(service_id: str) -> Mapping[str, Any]:
    """Return the most recently updated task of the given docker service.

    A service can have more than one task because previous ones might have
    died out; only the latest task is relevant, since at most one task per
    service is ever running.

    :raises DockerServiceNotFoundError: if the service has no tasks, or
        docker reports the service as missing (HTTP 404)
    """
    try:
        async with docker_client() as client:
            service_associated_tasks = await client.tasks.list(
                filters={"service": f"{service_id}"}
            )
            if not service_associated_tasks:
                raise DockerServiceNotFoundError(service_id=service_id)  # noqa: TRY301

            # pick the newest task directly (O(n)) instead of sorting the list
            last_task: Mapping[str, Any] = max(
                service_associated_tasks, key=lambda task: task["UpdatedAt"]  # type: ignore
            )
            return last_task
    except GenericDockerError as err:
        if err.original_exception.status == status.HTTP_404_NOT_FOUND:
            raise DockerServiceNotFoundError(service_id=service_id) from err
        raise
async def get_dynamic_sidecar_placement(
    service_id: str, dynamic_sidecar_settings: DynamicSidecarSettings
) -> str:
    """
    Waits until the service has a task in `running` state and
    returns its `docker_node_id`.

    It is assumed that a `docker_node_id` exists if the service
    is in `running` state.

    :raises DynamicSidecarError: when the running task carries no "NodeID"
    """

    # NOTE: `wait_random_exponential` is key for reducing pressure on docker swarm
    # The idea behind it is to avoid having concurrent retrying calls
    # when the system is having issues to respond. If the system
    # is failing clients are retrying at the same time,
    # it makes harder to recover.
    # Ideally you'd like to distribute the retries uniformly in time.
    # For more details see `wait_random_exponential` documentation.
    @retry(
        wait=wait_random_exponential(multiplier=2, min=1, max=20),
        stop=stop_after_delay(
            dynamic_sidecar_settings.DYNAMIC_SIDECAR_STARTUP_TIMEOUT_S
        ),
    )
    async def _get_task_data_when_service_running(service_id: str) -> Mapping[str, Any]:
        """
        Waits for dynamic-sidecar task to be `running` and returns the
        task data.
        """
        task = await _get_service_latest_task(service_id)
        service_state = task["Status"]["State"]

        # keep retrying (tenacity re-raises TryAgain) until the task runs
        if service_state not in TASK_STATES_RUNNING:
            raise TryAgain()
        return task

    task = await _get_task_data_when_service_running(service_id=service_id)

    docker_node_id: None | str = task.get("NodeID", None)
    if not docker_node_id:
        msg = f"Could not find an assigned NodeID for service_id={service_id}. Last task inspect result: {task}"
        raise DynamicSidecarError(msg)

    return docker_node_id
async def get_dynamic_sidecar_state(service_id: str) -> tuple[ServiceState, str]:
    """Return ``(state, message)`` derived from the service's latest task."""
    latest_task = await _get_service_latest_task(service_id)
    return extract_task_state(task_status=latest_task["Status"])
async def is_dynamic_sidecar_stack_missing(
    node_uuid: NodeID, dynamic_sidecar_settings: DynamicSidecarSettings
) -> bool:
    """True when neither the proxy nor the dynamic-sidecar service exists."""
    async with docker_client() as client:
        stack_services = await _list_docker_services(
            client,
            node_id=node_uuid,
            swarm_stack_name=dynamic_sidecar_settings.SWARM_STACK_NAME,
            return_only_sidecars=False,
        )
    return not stack_services
_NUM_SIDECAR_STACK_SERVICES: Final[int] = 2


async def are_sidecar_and_proxy_services_present(
    node_uuid: NodeID, dynamic_sidecar_settings: DynamicSidecarSettings
) -> bool:
    """True when the node's complete dynamic-sidecar stack is present.

    The dynamic-sidecar stack always expects to have exactly
    ``_NUM_SIDECAR_STACK_SERVICES`` (sidecar + proxy) running services.
    """
    async with docker_client() as client:
        stack_services = await _list_docker_services(
            client,
            node_id=node_uuid,
            swarm_stack_name=dynamic_sidecar_settings.SWARM_STACK_NAME,
            return_only_sidecars=False,
        )
        # idiom: return the comparison instead of if/return-False/return-True
        return len(stack_services) == _NUM_SIDECAR_STACK_SERVICES
async def _list_docker_services(
    client: aiodocker.docker.Docker,
    *,
    node_id: NodeID | None,
    swarm_stack_name: str,
    return_only_sidecars: bool,
) -> list[Mapping]:
    """List the stack's services, optionally restricted to one node and/or
    to dynamic-sidecar services only.

    Queries docker twice — once with the legacy label names and once with
    the new ``to_simcore_runtime_docker_label_key`` names — and merges the
    results, deduplicated by service ID.
    """
    # NOTE: this is here for backward compatibility when first deploying this change.
    # shall be removed after 1-2 releases without issues

    # backwards compatibility part
    def _make_filters(*, backwards_compatible: bool) -> Mapping[str, Any]:
        filters = {
            "label": [
                f"{'swarm_stack_name' if backwards_compatible else to_simcore_runtime_docker_label_key('swarm_stack_name')}={swarm_stack_name}",
            ],
        }
        if node_id:
            filters["label"].append(
                f"{'uuid' if backwards_compatible else to_simcore_runtime_docker_label_key('node_id')}={node_id}"
            )
        if return_only_sidecars:
            filters["name"] = [f"{DYNAMIC_SIDECAR_SERVICE_PREFIX}"]
        return filters

    warnings.warn(
        "After PR#4453 [https://github.com/ITISFoundation/osparc-simcore/pull/4453] reaches"
        " production, the backwards compatible code may be removed",
        stacklevel=2,
    )
    services_list: list[Mapping] = await client.services.list(
        filters=_make_filters(backwards_compatible=True)
    ) + await client.services.list(filters=_make_filters(backwards_compatible=False))

    # robustness: a service carrying BOTH the legacy and the new labels would
    # match both queries and appear twice; deduplicate by service ID so
    # callers counting or deleting services do not act on duplicates
    seen_service_ids: set[str] = set()
    unique_services: list[Mapping] = []
    for service in services_list:
        if service["ID"] not in seen_service_ids:
            seen_service_ids.add(service["ID"])
            unique_services.append(service)
    return unique_services
async def remove_dynamic_sidecar_stack(
    node_uuid: NodeID, dynamic_sidecar_settings: DynamicSidecarSettings
) -> None:
    """Delete every service of the node's sidecar stack.

    In theory there should only be 2 services (sidecar + proxy).
    """
    async with docker_client() as client:
        services = await _list_docker_services(
            client,
            node_id=node_uuid,
            swarm_stack_name=dynamic_sidecar_settings.SWARM_STACK_NAME,
            return_only_sidecars=False,
        )
        if not services:
            return
        await logged_gather(
            *(client.services.delete(service["ID"]) for service in services)
        )
async def remove_dynamic_sidecar_network(network_name: str) -> bool:
    """Try to delete the dynamic-sidecar network; return True on success.

    Returns ``False`` (after logging a warning) when docker refuses the
    removal, typically because containers are still attached to it.
    """
    try:
        async with docker_client() as client:
            network = await client.networks.get(network_name)
            await network.delete()
            return True
    except GenericDockerError as e:
        # fix: message typo "trying tor remove" -> "trying to remove"
        message = (
            f"{e}\nTIP: The above error may occur when trying to remove the network.\n"
            "Docker takes some time to establish that the network has no more "
            "containers attached to it."
        )
        log.warning(message)
        return False
async def is_sidecar_running(
    node_uuid: NodeID, dynamic_sidecar_settings: DynamicSidecarSettings
) -> bool:
    """True when the node's dynamic-sidecar service has one running task."""
    async with docker_client() as client:
        sidecar_services = await _list_docker_services(
            client,
            node_id=node_uuid,
            swarm_stack_name=dynamic_sidecar_settings.SWARM_STACK_NAME,
            return_only_sidecars=True,
        )
        if len(sidecar_services) != 1:
            return False

        # the sidecar counts as running only if exactly one of the
        # service's tasks is in the running state
        sidecar_service_id = sidecar_services[0]["ID"]
        running_tasks = await client.tasks.list(
            filters={"service": f"{sidecar_service_id}", "desired-state": "running"}
        )
        return len(running_tasks) == 1
async def get_or_create_networks_ids(
    networks: list[str], project_id: ProjectID
) -> dict[str, str]:
    """Ensure every project network exists and return ``{name: network_id}``.

    Missing networks are created as attachable, internal (no internet
    access) overlay networks labeled with the project id. Creation races
    with concurrent callers are tolerated: a failed create is assumed to
    mean "already exists" and the ID is fetched by name afterwards.
    """
    async def _get_id_from_name(client, network_name: str) -> str:
        network = await client.networks.get(network_name)
        network_inspect = await network.show()
        network_id: str = network_inspect["Id"]
        return network_id

    async with docker_client() as client:
        existing_networks_names = {x["Name"] for x in await client.networks.list()}
        log.debug("existing_networks_names=%s", existing_networks_names)

        # create networks if missing
        for network in networks:
            if network not in existing_networks_names:
                network_config = {
                    "Name": network,
                    "Driver": "overlay",
                    "Labels": {
                        "com.simcore.description": "project service communication network",
                        # used by the director-v2 to remove the network when the last
                        # service connected to the network was removed
                        "project_id": f"{project_id}",
                    },
                    "Attachable": True,
                    "Internal": True,  # no internet access
                }
                try:
                    await client.networks.create(network_config)
                except aiodocker.exceptions.DockerError:
                    # multiple calls to this function can be processed in parallel;
                    # this can make creation fail, in which case it is OK to assume
                    # the network already exists and resolve its ID below
                    log.info(
                        "Network %s might already exist, skipping creation", network
                    )

        # resolve all IDs concurrently
        networks_ids = await logged_gather(
            *[_get_id_from_name(client, network) for network in networks]
        )

        return dict(zip(networks, networks_ids, strict=True))
async def get_projects_networks_containers(
    project_id: ProjectID,
) -> dict[str, int]:
    """
    Returns all current projects_networks for the project with
    the amount of containers attached to them, as ``{network_name: count}``.

    NOTE: filtering networks by label is done through aiodocker's private
    ``_query_json`` because the public API does not expose list filters.
    """
    async with docker_client() as client:
        params = {"filters": clean_filters({"label": [f"project_id={project_id}"]})}
        filtered_networks = (
            # pylint:disable=protected-access
            await client.networks.docker._query_json(  # noqa: SLF001
                "networks", params=params
            )
        )

    if not filtered_networks:
        return {}

    def _count_containers(item: dict[str, Any]) -> int:
        # "Containers" may be absent from the inspect payload
        containers: list | None = item.get("Containers")
        return 0 if containers is None else len(containers)

    return {x["Name"]: _count_containers(x) for x in filtered_networks}
async def try_to_remove_network(network_name: str) -> None:
    """Best-effort removal of a project network; failures are only logged."""
    async with docker_client() as client:
        network = await client.networks.get(network_name)
        # if a project network for the current project has no more
        # containers attached to it (because the last service which
        # was using it was removed), also remove the network
        try:
            await network.delete()
        except aiodocker.exceptions.DockerError:
            log.warning("Could not remove network %s", network_name)
async def _update_service_spec(
    service_name: str,
    *,
    update_in_service_spec: dict,
    stop_delay: float = 10.0,
) -> None:
    """
    Updates the spec of a service by merging ``update_in_service_spec``
    into the currently stored spec and POSTing the result back to docker,
    retrying "update out of sequence" conflicts for up to ``stop_delay``
    seconds.
    """
    async with docker_client() as client:
        # NOTE: builtin `DockerServices.update` function is very limited.
        # Using the same pattern but updating labels

        # The docker service update API is async, so `update out of sequence` error
        # might get raised. This is caused by the `service_version` being out of sync
        # with what is currently stored in the docker daemon.
        async for attempt in AsyncRetrying(
            # waits exponentially to a max of `stop_delay` seconds
            stop=stop_after_delay(stop_delay),
            wait=wait_exponential(min=1),
            retry=retry_if_exception_type(TryAgain),
            reraise=True,
        ):
            with attempt:
                try:
                    # fetch current version, id and spec from the service name
                    service_inspect = await client.services.inspect(service_name)
                    service_version = service_inspect["Version"]["Index"]
                    service_id = service_inspect["ID"]
                    spec = service_inspect["Spec"]

                    # merge only the leaf keys present in the requested update
                    updated_spec = nested_update(
                        spec,
                        update_in_service_spec,
                        include=get_leaf_key_paths(update_in_service_spec),
                    )
                    await client._query_json(  # pylint: disable=protected-access # noqa: SLF001
                        f"services/{service_id}/update",
                        method="POST",
                        data=json_dumps(clean_map(updated_spec)),
                        params={"version": service_version},
                    )
                except aiodocker.exceptions.DockerError as e:
                    # stale version -> refetch and retry; anything else propagates
                    if (
                        e.status == status.HTTP_500_INTERNAL_SERVER_ERROR
                        and "out of sequence" in e.message
                    ):
                        raise TryAgain() from e
                    raise
async def update_scheduler_data_label(scheduler_data: SchedulerData) -> None:
    """Persist the scheduler data as a label on the dynamic-sidecar service.

    A missing service (HTTP 404) is tolerated: it may already have been
    removed, so the update is simply skipped with an info log. Any other
    docker error is re-raised (the original code silently swallowed them).
    """
    try:
        await _update_service_spec(
            service_name=scheduler_data.service_name,
            update_in_service_spec={
                "Labels": {
                    DYNAMIC_SIDECAR_SCHEDULER_DATA_LABEL: scheduler_data.as_label_data()
                }
            },
        )
    except GenericDockerError as e:
        if e.original_exception.status != status.HTTP_404_NOT_FOUND:
            # unexpected docker errors must not be hidden
            raise
        log.info(
            "Skipped labels update for service '%s' which could not be found.",
            scheduler_data.service_name,
        )
async def constrain_service_to_node(service_name: str, docker_node_id: str) -> None:
    """Pin the service to a node by adding a placement constraint."""
    placement_update = {
        "TaskTemplate": {
            "Placement": {"Constraints": [f"node.id=={docker_node_id}"]}
        }
    }
    await _update_service_spec(service_name, update_in_service_spec=placement_update)
    log.info("Constraining service %s to node %s", service_name, docker_node_id)
| [
"noreply@github.com"
] | pcrespov.noreply@github.com |
b21b7c4509567cee5d172d6e123085031357a543 | 038ce0cf1d4e6f6a8ed6736663b6bb1e02d01b2a | /the_tale/post_service/tests/test_reset_password.py | cd1a25721437c7634379233bf22090e602731efb | [
"BSD-2-Clause-Views"
] | permissive | GrandUser/the-tale | d363fc34bc3cd04ced2bd718f375fa83f887c7df | 3f7ec22c457a0c400ddb51dede7e8a3e962acf83 | refs/heads/master | 2021-01-19T06:56:52.868165 | 2016-05-22T15:07:32 | 2016-05-22T15:07:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,765 | py | # coding: utf-8
from django.core import mail
from the_tale.common.utils import testcase
from the_tale.accounts.logic import register_user
from the_tale.accounts.prototypes import AccountPrototype, ResetPasswordTaskPrototype
from the_tale.game.logic import create_test_map
from the_tale.post_service.models import Message
from the_tale.post_service.prototypes import MessagePrototype
class ResetPasswordTests(testcase.TestCase):
def setUp(self):
super(ResetPasswordTests, self).setUp()
create_test_map()
register_user('user_1', 'user_1@test.com', '111111')
self.account_1 = AccountPrototype.get_by_nick('user_1')
self.reset_task = ResetPasswordTaskPrototype.create(self.account_1)
self.message = MessagePrototype.get_priority_message()
def test_register_message(self):
self.assertEqual(Message.objects.all().count(), 1)
def test_mail_send(self):
self.assertEqual(len(mail.outbox), 0)
self.message.process()
self.assertTrue(self.message.state.is_PROCESSED)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [self.account_1.email])
self.assertTrue(self.reset_task.uuid in mail.outbox[0].body)
self.assertTrue(self.reset_task.uuid in mail.outbox[0].alternatives[0][0])
def test_mail_send__to_system_user(self):
from the_tale.accounts.logic import get_system_user
Message.objects.all().delete()
ResetPasswordTaskPrototype.create(get_system_user())
message = MessagePrototype.get_priority_message()
self.assertEqual(len(mail.outbox), 0)
message.process()
self.assertTrue(message.state.is_PROCESSED)
self.assertEqual(len(mail.outbox), 0)
| [
"a.eletsky@gmail.com"
] | a.eletsky@gmail.com |
42972f492df490b20e312d8053977787b6f6f9b5 | 66d9f74aabb3ecf2a79f24d36f94e082166fa9df | /trunk/webui/cloud/page/TopMenu.py | 6dbfd0a623a8321499036e2ab404f130b01f4cf8 | [] | no_license | willcai1984/AerohiveAuto | 6b7313de7c09a7d9b749f4531751eac0999b41f7 | a4aeea7f98dc279b17515f5d1719efce20dd5133 | refs/heads/master | 2020-05-24T14:42:35.924787 | 2014-11-18T09:19:48 | 2014-11-18T09:19:48 | 26,797,519 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,317 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = 'hshao'
from selenium.webdriver.common.by import By
from webui import WebElement
class Home(WebElement):
menu_page_title = 'HiveManager NG'
Dashboard_btn = (By.XPATH, '//li[@data-dojo-attach-point="dashboardtab"]/a')
Monitor_btn = (By.XPATH, '//li[@data-dojo-attach-point="monitoringtab"]/a')
Devices_btn = (By.XPATH, '//li[@data-dojo-attach-point="devicesTab"]/a')
Configuration_btn = (By.XPATH, '//li[@data-dojo-attach-point="configurationtab"]/a')
Administration_btn = (By.XPATH, '//li[@class="data-dojo-attach-point="admintab"]/a')
class MenuSuccessfulPage(WebElement):
menu_successful_page_title = 'HiveManager NG'
Dashboard_successful_page_menu_title = 'Network Policies'
Monitor_successful_page_menu_title = 'Network Policies'
Devices_successful_page_menu_title = 'Devices'
Devices_successful_page_menu_title_xpath = (By.XPATH, '//div[@data-dojo-attach-point="DeviceListArea"]/descendant::div[span="Devices"]')
Configuration_successful_page_menu_title = 'Network Policies'
Configuration_successful_page_menu_title_xpath = (By.XPATH, '//div[@data-dojo-attach-point="NetworkPolicyListArea"]/div[@class="ui-tle"]/span')
Administration_successful_page_menu_title = 'Network Policies'
| [
"willcai1984@gmail.com"
] | willcai1984@gmail.com |
5755e6b8e66c11d2edb617fcfba69571cd90936f | 303bac96502e5b1666c05afd6c2e85cf33f19d8c | /solutions/python3/42.py | 7410ae5963ede23de80722a57ac467c5f85b6442 | [
"MIT"
] | permissive | jxhangithub/leetcode | 5e82f4aeee1bf201e93e889e5c4ded2fcda90437 | 0de1af607557d95856f0e4c2a12a56c8c57d731d | refs/heads/master | 2022-05-22T12:57:54.251281 | 2022-03-09T22:36:20 | 2022-03-09T22:36:20 | 370,508,127 | 1 | 0 | MIT | 2022-03-09T22:36:20 | 2021-05-24T23:16:10 | null | UTF-8 | Python | false | false | 426 | py | class Solution:
def trap(self, height):
res, left, l, r = 0, {}, 0, 0
for i, h in enumerate(height):
left[i] = l
if h > l:
l = h
for i in range(len(height) - 1, -1, -1):
roof = min(left[i] , r)
if roof > height[i]:
res += roof - height[i]
if height[i] > r:
r = height[i]
return res | [
"cenkay.arapsagolu@gmail.com"
] | cenkay.arapsagolu@gmail.com |
2741c2228d5f38ab4186616ac481b2ea60e1e963 | d042b8895dc8347356fa4d5984d07bff41eecc73 | /obtainfo/views/detail.py | 7f708199c23a55cb68966520677fad0b829334e5 | [
"Apache-2.0"
] | permissive | jzx1230/obtainfo | 257b075c32c3448096391f258f42dd7f0c081350 | 883c29ab0a462d11682b60b9b52b2fc93031b816 | refs/heads/master | 2021-05-08T04:19:33.810848 | 2015-10-13T10:10:10 | 2015-10-13T10:10:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,121 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import base64
import copy
import datetime
import hashlib
import logging
import os
import random
import re
import StringIO
import time
import urllib
import urlparse
from functools import wraps
from zipfile import ZipFile
import pymongo
from bson.objectid import ObjectId
from django.conf import settings
from django.core.cache import cache
from django.core.urlresolvers import NoReverseMatch, reverse
from django.db.models import F
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.template import Context
from django.template.loader import get_template
from django.utils import timezone
from django.utils.dateparse import parse_date
from django.utils.http import urlquote
from django.utils.translation import ugettext as _
from django.views.decorators.cache import cache_page
from django.views.decorators.csrf import csrf_exempt
from shortuuid import uuid
from obtainfo.models import MovieInfo
from obtainfo.templatetags.obtainfo_tags import pic_tag as render_pic
from pcnile.helper import group_list, md5sum
from sendfile import sendfile
logger = logging.getLogger(__name__)
re_urn = re.compile(ur'xt=urn:btih:(\w+)')
re_file_name = re.compile(r"[\/\\\:\*\?\"\<\>\|]")
verify_oid = lambda oid: True if re.match(r'^[0-9a-fA-F]{24}$', oid) else False
re_ua = re.compile(r"android|webos|iphone|ipad|ipod|blackberry|iemobile|opera mini", re.IGNORECASE)
def used_time_tag(func):
@wraps(func)
def inner(request, *args, **kwargs):
stamp = time.time()
response = func(request, *args, **kwargs)
logger.info('%s, %s' % (time.time() - stamp, request.path))
return response
return inner
def memoize(key_prefix):
def func_wrapper(func):
@wraps(func)
def inner(key, *args, **kwargs):
key = "%s_%s" % (key_prefix, key)
response = cache.get(key)
if response:
return response
else:
response = func(*args, **kwargs)
cache.set(key, response, timeout=60 * 60)
return response
return inner
return func_wrapper
def detail_thunder(request, uid):
if verify_oid(uid) == False:
raise Http404
link = "http://movie.obtainfo.com/detail/%s.zip" % uid
thunder = "thunder://" + base64.standard_b64encode('AA' + link + 'ZZ')
return render(request, 'thunder.html', {'thunder': thunder}, )
def detail_show(request, uid):
if verify_oid(uid) == False:
raise Http404
try:
collection = settings.MONGOINFO
m = collection.find_one({'_id': ObjectId(uid)})
except pymongo.errors.InvalidId:
logger.error('get a invalid movie id %s' % uid)
raise Http404
if m:
blog = get_template('show.txt')
article = blog.render(Context({'m': m}))
response = HttpResponse(mimetype="application/txt")
response["Content-Disposition"] = "attachment; filename=%s.txt" % m['_id']
response.write(article)
return response
else:
raise Http404
def detail_show_zip(request, uid):
if verify_oid(uid) == False:
raise Http404
try:
collection = settings.MONGOINFO
m = collection.find_one({'_id': ObjectId(uid)})
except pymongo.errors.InvalidId:
logger.error('get a invalid movie id %s' % uid)
raise Http404
if m:
blog = get_template('show.txt')
article = blog.render(Context({'m': m})).encode('utf-8')
in_memory = StringIO.StringIO()
zip = ZipFile(in_memory, "a")
try:
zip.writestr("%s.txt" % m['title'], article)
except:
zip.writestr("%s.txt" % m['_id'], article)
for f in zip.filelist:
f.create_system = 0
zip.close()
response = HttpResponse(mimetype="application/zip")
response["Content-Disposition"] = "attachment; filename=%s.zip" % m['_id']
in_memory.seek(0)
response.write(in_memory.read())
try:
mi, created = MovieInfo.objects.get_or_create(id=uid, defaults={'title': m['title'],
'timestamp': datetime.datetime.now()})
mi.zip_visitor = F('zip_visitor') + 1
mi.save()
except:
pass
return response
else:
raise Http404
def build_detail_html(request, uid):
try:
db = settings.MONGODB
m = db.info.find_one({'_id': ObjectId(uid)})
m['resource'] = db.resource.find_one({'_id': ObjectId(uid)})
del m['resource']['_id']
except:
raise Http404
if m['type'] == 'tv' and len(m['resource']['online']):
m['resource']['online_length'] = 0
for o in m['resource']['online']:
m['resource']['online_length'] += len(o['resource'])
for site in o['resource']:
site['id'] = uuid()[:4]
"""
fix douban comment date
"""
m['comment'] = m['comment'][:10]
for c in m['comment']:
c['update'] = parse_date(c['update'])
try:
rating = int(c['rating'])
except ValueError:
rating = 50
if rating <= 10:
c['rating'] = str(12)
elif rating <= 20:
c['rating'] = str(22)
elif rating <= 30:
c['rating'] = str(32)
elif rating <= 40:
c['rating'] = str(42)
else:
c['rating'] = str(55)
return render(request, 'detail.html', {'m': m}, )
@used_time_tag
def detail(request, uid):
if verify_oid(uid) == False:
raise Http404
if request.user.is_superuser:
return build_detail_html(request, uid)
try:
mi, created = MovieInfo.objects.get_or_create(id=uid, defaults={'title': 'title_occupy',
'timestamp': datetime.datetime.now()})
if created:
collection = settings.MONGOINFO
m = collection.find_one({'_id': ObjectId(uid)}, {'title': 1})
mi.title = m['title']
mi.visitor = F('visitor') + 1
mi.save()
except Exception as e:
logger.info('create movie info object for count fail')
file_path = os.path.join(settings.HTML_DIR, uid)
if os.path.isfile(file_path):
with open(file_path, 'rb') as f:
return HttpResponse(f.read())
else:
response = build_detail_html(request, uid)
with open(file_path, 'wb') as f:
f.write(response.content)
return response
@csrf_exempt
def download(request):
try:
if not urlparse.urlparse(request.META['HTTP_REFERER']).path.startswith("/detail/"):
return HttpResponseRedirect("/")
except KeyError:
return HttpResponseRedirect("/")
try:
c = request.GET['c']
k = c + '.uid'
v = request.GET['uid']
except KeyError as e:
raise Http404
try:
collection = settings.MONGORESOURCE
r = collection.find_one({k: v}, {'_id': 0, c: {'$elemMatch': {'uid': v}}})[c][0]
r['magnet'] = False
except Exception as e:
raise Http404
try:
if r['link'].startswith('magnet'):
r['magnet'] = True
# if user_agent_parse(request.META['HTTP_USER_AGENT']).is_pc == False:
if re_ua.findall(request.META['HTTP_USER_AGENT']):
urn = re_urn.findall(r['link'])[0].lower()
urn_file = os.path.join(urn[:2], urn + '.torrent')
full_path = os.path.join(settings.TORRENT_DIR, urn_file)
if os.path.isfile(full_path):
r['urn'] = urn
r['torrent'] = urn_file.replace("\\", '/')
except Exception as e:
print e
return render(request, 'download.html', r, )
@csrf_exempt
def torrent_download(request):
try:
if not urlparse.urlparse(request.META['HTTP_REFERER']).path.startswith("/download/"):
return HttpResponseRedirect("/")
except KeyError:
return HttpResponseRedirect("/")
if request.method == 'POST':
urn = request.POST.get('urn', '')
if urn == '':
raise Http404
path = request.POST.get('link', None)
if os.path.isfile(os.path.join(settings.TORRENT_DIR, path)):
response = sendfile(request, os.path.join(settings.TORRENT_DIR, path))
else:
raise Http404
try:
name = u"【欧泊影视】%s.torrent" % re_file_name.sub("", request.POST.get('title', urn))
except:
name = u"【欧泊影视】电影名字未知"
if "MSIE" in request.META['HTTP_USER_AGENT']:
response['Content-Disposition'] = 'attachment; filename="' + urllib.quote_plus(name.encode('utf-8')) + '"'
else:
response['Content-Disposition'] = 'attachment; filename="' + name.encode('utf-8') + '"'
return response
return HttpResponseRedirect("/")
| [
"pczhaoyun@gmail.com"
] | pczhaoyun@gmail.com |
d6f9726289c482ee28560352f61c27e948ee4c0f | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /6hnrKRh7fZfMC5CKY_12.py | 826d0c825fe9b56642774cdbea1608c929276213 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,197 | py | """
Given an integer, return a new **integer** according to the rules below:
* Split the number into groups of two digit numbers. If the number has an _odd_ number of digits, return `"invalid"`.
* For each group of two digit numbers, concatenate the _last digit_ to a new string the same number of times as the value of the _first digit_.
* Return the result as an _integer_.
look_and_say(3132) ➞ 111222
# By reading the number digit by digit, you get three "1" and three "2".
# Therefore, you put three ones and three two's together.
# Remember to return an integer.
### Examples
look_and_say(95) ➞ 555555555
look_and_say(1213141516171819) ➞ 23456789
look_and_say(120520) ➞ 200
look_and_say(231) ➞ "invalid"
### Notes
* Note that the number **0** can be included (see example #3).
* Check the **Resources** tab for a TED-Ed video for extra clarity.
"""
def look_and_say(n):
n_s = str(n)
if len(n_s) % 2 != 0:
return "invalid"
else:
var = ""
i = 0
while i < len(n_s):
j = 0
while j < int(n_s[i]):
var = var + n_s[i+1]
j = j + 1
i = i + 2
return int(var)
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
ba0a07d4363c1a6a51d8418e9bcbcfe4b2118f86 | 5c24dfc11c855fa0e4f196bc2e38661842761ab8 | /backend/ImproveContent/DecreaseAnt.py | 5b1d3d9dbe70ab765c1ed4bd5d2d7864c3579ab1 | [] | no_license | CharlesRajendran/go-viral | 2ad7bbdaf1b7d9dbfa0330012dba2740dbe10952 | 05ced94ac1b97df965232f3c44e341a980e58b3c | refs/heads/master | 2020-03-20T01:58:58.616333 | 2018-06-12T17:32:05 | 2018-06-12T17:32:05 | 137,095,620 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,669 | py | import nltk
import fileinput
from nltk.corpus import wordnet as wn
from nltk.tokenize import RegexpTokenizer
from nltk.corpus import stopwords
stop_words = set(stopwords.words('english'))
def increaseAnger(file_name):
toker = RegexpTokenizer(r'\w+')
words = toker.tokenize(file_name)
allowed_types = ["JJ", "JJR", "JJS", "NN", "NNS", "RB", "RBR", "RBS", "VB", "VBD", "VBG", "VBN", "VBP", "VBZ"]
filtered_words = []
#stopwords removal
for w in words:
if w not in stop_words:
filtered_words.append(w)
pos = nltk.pos_tag(filtered_words)
allowed_words = []
for p in pos:
if p[1] in allowed_types:
allowed_words.append(p[0].lower())
allowed_words_with_so = [[]]
for line in fileinput.input("anticipationSO.txt"):
chunks = line.split()
if chunks[0] in allowed_words:
allowed_words_with_so.append([chunks[0], chunks[1]])
recommended_words = [[]]
#to remove empty elements
awwso = list(filter(None, allowed_words_with_so))
#to remove duplicates
al = set(tuple(element) for element in awwso)
for el in al:
print(el[0])
syn = wn.synsets(el[0])
for sy in syn:
for le in sy.lemmas():
for line in fileinput.input("anticipationSO.txt"):
chunks = line.split()
if (chunks[0] == le.name())and (int(chunks[1])<int(el[1])):
recommended_words.append([el[0], el[1], le.name(), chunks[1]])
fileinput.close()
return recommended_words
#print(increaseAnger(open("23.txt","r").read()))
| [
"charlesrajendran44@gmail.com"
] | charlesrajendran44@gmail.com |
b1d386ee77ceb6075eacc7e5a106f8fefd6a2ee1 | 2ca5ba967211d4ff37fc530690041503f0e5ec69 | /server/evalSentiment/train.py | a846acbfa44fba7588596c40522bd88f9cfdd467 | [] | no_license | jessica486424/Gerli | 38c0bfab1133212af71fa740e8d2496f752d3c74 | 89172e40ea00ac3439dca99364e80d8a6e6d6465 | refs/heads/master | 2021-01-11T08:15:20.011612 | 2017-03-28T23:49:16 | 2017-03-28T23:49:16 | 69,801,970 | 1 | 0 | null | 2016-10-02T14:34:27 | 2016-10-02T14:34:27 | null | UTF-8 | Python | false | false | 8,225 | py | #! /usr/bin/env python
import tensorflow as tf
import numpy as np
import os
import time
import datetime
import data_helpers
from text_cnn import TextCNN
from tensorflow.contrib import learn
# Parameters
# ==================================================
# Data loading params
tf.flags.DEFINE_float("dev_sample_percentage", .1, "Percentage of the training data to use for validation")
tf.flags.DEFINE_string("positive_data_file", "./data/rt-polaritydata/rt-polarity.pos", "Data source for the positive data.")
tf.flags.DEFINE_string("negative_data_file", "./data/rt-polaritydata/rt-polarity.neg", "Data source for the positive data.")
# Model Hyperparameters
tf.flags.DEFINE_integer("embedding_dim", 128, "Dimensionality of character embedding (default: 128)")
tf.flags.DEFINE_string("filter_sizes", "3,4,5", "Comma-separated filter sizes (default: '3,4,5')")
tf.flags.DEFINE_integer("num_filters", 128, "Number of filters per filter size (default: 128)")
tf.flags.DEFINE_float("dropout_keep_prob", 0.5, "Dropout keep probability (default: 0.5)")
tf.flags.DEFINE_float("l2_reg_lambda", 0.0, "L2 regularizaion lambda (default: 0.0)")
# Training parameters
tf.flags.DEFINE_integer("batch_size", 64, "Batch Size (default: 64)")
tf.flags.DEFINE_integer("num_epochs", 10, "Number of training epochs (default: 200)")
tf.flags.DEFINE_integer("evaluate_every", 100, "Evaluate model on dev set after this many steps (default: 100)")
tf.flags.DEFINE_integer("checkpoint_every", 100, "Save model after this many steps (default: 100)")
# Misc Parameters
tf.flags.DEFINE_boolean("allow_soft_placement", True, "Allow device soft device placement")
tf.flags.DEFINE_boolean("log_device_placement", False, "Log placement of ops on devices")
FLAGS = tf.flags.FLAGS
FLAGS._parse_flags()
print("\nParameters:")
for attr, value in sorted(FLAGS.__flags.items()):
print("{}={}".format(attr.upper(), value))
print("")
# Data Preparatopn
# ==================================================
# Load data
print("Loading data...")
x_text, y = data_helpers.load_data_and_labels(FLAGS.positive_data_file, FLAGS.negative_data_file)
# Build vocabulary
max_document_length = max([len(x.split(" ")) for x in x_text])
vocab_processor = learn.preprocessing.VocabularyProcessor(max_document_length)
x = np.array(list(vocab_processor.fit_transform(x_text)))
# Randomly shuffle data
np.random.seed(10)
shuffle_indices = np.random.permutation(np.arange(len(y)))
x_shuffled = x[shuffle_indices]
y_shuffled = y[shuffle_indices]
# Split train/test set
# TODO: This is very crude, should use cross-validation
dev_sample_index = -1 * int(FLAGS.dev_sample_percentage * float(len(y)))
x_train, x_dev = x_shuffled[:dev_sample_index], x_shuffled[dev_sample_index:]
y_train, y_dev = y_shuffled[:dev_sample_index], y_shuffled[dev_sample_index:]
print("Vocabulary Size: {:d}".format(len(vocab_processor.vocabulary_)))
print("Train/Dev split: {:d}/{:d}".format(len(y_train), len(y_dev)))
# Training
# ==================================================
with tf.Graph().as_default():
session_conf = tf.ConfigProto(
allow_soft_placement=FLAGS.allow_soft_placement,
log_device_placement=FLAGS.log_device_placement,
intra_op_parallelism_threads=8)
sess = tf.Session(config=session_conf)
with sess.as_default():
cnn = TextCNN(
sequence_length=x_train.shape[1],
num_classes=y_train.shape[1],
vocab_size=len(vocab_processor.vocabulary_),
embedding_size=FLAGS.embedding_dim,
filter_sizes=list(map(int, FLAGS.filter_sizes.split(","))),
num_filters=FLAGS.num_filters,
l2_reg_lambda=FLAGS.l2_reg_lambda)
# Define Training procedure
global_step = tf.Variable(0, name="global_step", trainable=False)
optimizer = tf.train.AdamOptimizer(1e-3)
grads_and_vars = optimizer.compute_gradients(cnn.loss)
train_op = optimizer.apply_gradients(grads_and_vars, global_step=global_step)
# Keep track of gradient values and sparsity (optional)
grad_summaries = []
for g, v in grads_and_vars:
if g is not None:
grad_hist_summary = tf.histogram_summary("{}/grad/hist".format(v.name), g)
sparsity_summary = tf.scalar_summary("{}/grad/sparsity".format(v.name), tf.nn.zero_fraction(g))
grad_summaries.append(grad_hist_summary)
grad_summaries.append(sparsity_summary)
grad_summaries_merged = tf.merge_summary(grad_summaries)
# Output directory for models and summaries
timestamp = str(int(time.time()))
out_dir = os.path.abspath(os.path.join(os.path.curdir, "runs", timestamp))
out_dir = os.path.abspath(os.path.join(os.path.curdir, "trainingOutput"))
print("Writing to {}\n".format(out_dir))
# Summaries for loss and accuracy
loss_summary = tf.scalar_summary("loss", cnn.loss)
acc_summary = tf.scalar_summary("accuracy", cnn.accuracy)
# Train Summaries
train_summary_op = tf.merge_summary([loss_summary, acc_summary, grad_summaries_merged])
train_summary_dir = os.path.join(out_dir, "summaries", "train")
train_summary_writer = tf.train.SummaryWriter(train_summary_dir, sess.graph)
# Dev summaries
dev_summary_op = tf.merge_summary([loss_summary, acc_summary])
dev_summary_dir = os.path.join(out_dir, "summaries", "dev")
dev_summary_writer = tf.train.SummaryWriter(dev_summary_dir, sess.graph)
# Checkpoint directory. Tensorflow assumes this directory already exists so we need to create it
checkpoint_dir = os.path.abspath(os.path.join(out_dir, "checkpoints"))
checkpoint_prefix = os.path.join(checkpoint_dir, "model")
if not os.path.exists(checkpoint_dir):
os.makedirs(checkpoint_dir)
saver = tf.train.Saver(tf.all_variables())
# Write vocabulary
vocab_processor.save(os.path.join(out_dir, "vocab"))
# Initialize all variables
sess.run(tf.initialize_all_variables())
def train_step(x_batch, y_batch):
"""
A single training step
"""
feed_dict = {
cnn.input_x: x_batch,
cnn.input_y: y_batch,
cnn.dropout_keep_prob: FLAGS.dropout_keep_prob
}
_, step, summaries, loss, accuracy = sess.run(
[train_op, global_step, train_summary_op, cnn.loss, cnn.accuracy],
feed_dict)
time_str = datetime.datetime.now().isoformat()
print("{}: step {}, loss {:g}, acc {:g}".format(time_str, step, loss, accuracy))
train_summary_writer.add_summary(summaries, step)
def dev_step(x_batch, y_batch, writer=None):
"""
Evaluates model on a dev set
"""
feed_dict = {
cnn.input_x: x_batch,
cnn.input_y: y_batch,
cnn.dropout_keep_prob: 1.0
}
step, summaries, loss, accuracy = sess.run(
[global_step, dev_summary_op, cnn.loss, cnn.accuracy],
feed_dict)
time_str = datetime.datetime.now().isoformat()
print("{}: step {}, loss {:g}, acc {:g}".format(time_str, step, loss, accuracy))
if writer:
writer.add_summary(summaries, step)
# Generate batches
batches = data_helpers.batch_iter(
list(zip(x_train, y_train)), FLAGS.batch_size, FLAGS.num_epochs)
# Training loop. For each batch...
for batch in batches:
#print zip(*batch)
x_batch, y_batch = zip(*batch)
train_step(x_batch, y_batch)
current_step = tf.train.global_step(sess, global_step)
if current_step % FLAGS.evaluate_every == 0:
print("\nEvaluation:")
dev_step(x_dev, y_dev, writer=dev_summary_writer)
print("")
if current_step % FLAGS.checkpoint_every == 0:
path = saver.save(sess, checkpoint_prefix, global_step=current_step)
print("Saved model checkpoint to {}\n".format(path))
| [
"a6214123@gmail.com"
] | a6214123@gmail.com |
179ff15a59f3fa06312c64996a9151b248133d51 | 990b53061c912397312e505f3c483fea0ed07ec1 | /env/lib/python3.6/stat.py | 76ef3fbdf5dbc60b74e96c591a861cefe9edc7fa | [] | no_license | YauheniRusinchuk/flask_project | eedaf0688a98f298d980abcb5605d6d891dcd21c | d8ff22e6ee7b197099256aa28c909d8a1856a95a | refs/heads/master | 2020-03-29T19:14:14.809477 | 2018-09-26T15:04:26 | 2018-09-26T15:04:26 | 150,253,701 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 40 | py | /home/yr/anaconda3/lib/python3.6/stat.py | [
"ruevgal@gmail.com"
] | ruevgal@gmail.com |
08f3c63df0a0b2bb7b4b07f7f0a824e1c895fbf1 | 10199a6ffc89c3fe3dd8747385989f6dfa354b3e | /nornir/plugins/tasks/apis/http_method.py | e4c50292e393e3dbe5b66041c6479eab3a914a00 | [
"Apache-2.0"
] | permissive | billgrant/nornir | 134f151818b444cee6a46ef80b4cc3ec73da3262 | 837bf85902d1776022d19f460ebe559884a9ffbe | refs/heads/develop | 2020-04-08T09:45:50.163417 | 2018-11-26T08:31:18 | 2018-11-26T08:31:18 | 159,238,505 | 0 | 0 | Apache-2.0 | 2018-11-26T21:59:29 | 2018-11-26T21:59:26 | Python | UTF-8 | Python | false | false | 1,553 | py | from nornir.core.task import Result
import requests
def http_method(task=None, method="get", url="", raise_for_status=True, **kwargs):
"""
This is a convenience task that uses `requests <http://docs.python-requests.org/en/master/>`_ to
interact with an HTTP server.
Arguments:
method (string): HTTP method to call
url (string): URL to connect to
raise_for_status (bool): Whether to call `raise_for_status
<http://docs.python-requests.org/en/master/api/#requests.Response.raise_for_status>`_
method automatically or not. For quick reference, raise_for_status will consider an
error if the return code is any of 4xx or 5xx
**kwargs: Keyword arguments will be passed to the `request
<http://docs.python-requests.org/en/master/api/#requests.request>`_
method
Returns:
:obj:`nornir.core.task.Result`:
* result (``string/dict``): Body of the response. Either text or a dict if the
response was a json object
* reponse (object): Original `Response
<http://docs.python-requests.org/en/master/api/#requests.Response>`_
"""
r = requests.request(method, url, **kwargs)
if raise_for_status:
r.raise_for_status()
try:
content_type = r.headers["Content-type"]
except KeyError:
content_type = "text"
result = r.json() if "application/json" == content_type else r.text
return Result(host=task.host if task else None, response=r, result=result)
| [
"dbarrosop@dravetech.com"
] | dbarrosop@dravetech.com |
f5be676611bbe6c6f91ce40e74a2f5a2523b9938 | 3fe272eea1c91cc5719704265eab49534176ff0d | /scripts/portal/enterHRpt.py | 416069eee82ed5292391535cd8932742f8aea17a | [
"MIT"
] | permissive | Bratah123/v203.4 | e72be4843828def05592298df44b081515b7ca68 | 9cd3f31fb2ef251de2c5968c75aeebae9c66d37a | refs/heads/master | 2023-02-15T06:15:51.770849 | 2021-01-06T05:45:59 | 2021-01-06T05:45:59 | 316,366,462 | 1 | 0 | MIT | 2020-12-18T17:01:25 | 2020-11-27T00:50:26 | Java | UTF-8 | Python | false | false | 252 | py | # 222020000 - Ludi tower: Helios Tower <Library> (CoK 3rd job portal)
if not sm.hasQuest(20881): # 3rd job quest
sm.chat("Only knights looking to job advance to the third job may enter here.")
else:
sm.warpInstanceIn(922030400, 0)
sm.dispose()
| [
"pokesmurfuwu@gmail.com"
] | pokesmurfuwu@gmail.com |
692ce7baaccecf845f9c396f1e159091422e7e9f | 83977dad449cfb0d0401194bad9fdf3d5b794118 | /channelfilter/channelfilter.py | e0d8daf8e74b54a33893928c5413c56903be7d99 | [
"MIT"
] | permissive | Rick7C2/TWO-Cogs | 935fcb30ba2b6a929a77cbc50f794b807168afee | 5c5d470ac96780fbc05bb59ba3fa532f01587c81 | refs/heads/master | 2020-03-20T11:10:14.131098 | 2018-06-15T00:29:59 | 2018-06-15T00:29:59 | 137,394,811 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,196 | py | # -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2017 SML
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
import os
from collections import defaultdict
import discord
from cogs.utils import checks
from cogs.utils.dataIO import dataIO
from discord.ext import commands
PATH = os.path.join("data", "channelfilter")
JSON = os.path.join(PATH, "settings.json")
def nested_dict():
"""Recursively nested defaultdict."""
return defaultdict(nested_dict)
class ChannelFilter:
"""Channelf filter"""
def __init__(self, bot):
"""Init."""
self.bot = bot
self.settings = dataIO.load_json(JSON)
def init_server_settings(self, server):
self.settings[server.id] = {}
dataIO.save_json(JSON, self.settings)
def get_server_settings(self, server):
"""Return server settings."""
if server.id not in self.settings:
self.settings[server.id] = {}
dataIO.save_json(JSON, self.settings)
return self.settings[server.id]
def get_channel_settings(self, server, channel):
"""Return channel settings."""
server_settings = self.get_server_settings(server)
if channel.id not in server_settings:
self.settings[server.id][channel.id] = {}
dataIO.save_json(JSON, self.settings)
return self.settings[server.id][channel.id]
def add_word(self, server, channel, word, reason=None):
"""Add word to filter."""
channel_settings = self.get_channel_settings(server, channel)
channel_settings[word.lower()] = {
'reason': reason
}
dataIO.save_json(JSON, self.settings)
def remove_word(self, server, channel, word):
"""Remove word from filter."""
channel_settings = self.get_channel_settings(server, channel)
success = channel_settings.pop(word, None)
dataIO.save_json(JSON, self.settings)
if success is None:
return False
else:
return True
@checks.mod_or_permissions()
@commands.group(pass_context=True, aliases=['cf', 'cfilter'])
async def channelfilter(self, ctx):
"""Filter words by channel."""
if ctx.invoked_subcommand is None:
await self.bot.send_cmd_help(ctx)
@checks.is_owner()
@channelfilter.command(name="init", pass_context=True)
async def channelfilter_init(self, ctx):
"""Init server settings."""
server = ctx.message.server
self.init_server_settings(server)
await self.bot.say("Settings initialized.")
@checks.mod_or_permissions()
@channelfilter.command(name="add", pass_context=True, no_pm=True)
async def channelfilter_add(self, ctx, word, reason=None):
"""Add words."""
server = ctx.message.server
channel = ctx.message.channel
self.add_word(server, channel, word, reason=reason)
await self.bot.say("Added word to filter.")
@checks.mod_or_permissions()
@channelfilter.command(name="remove", pass_context=True, no_pm=True)
async def channelfilter_remove(self, ctx, word):
"""Remove words."""
server = ctx.message.server
channel = ctx.message.channel
success = self.remove_word(server, channel, word)
if success:
await self.bot.say("Removed word from filter.")
else:
await self.bot.say("Cannot find that word in filter.")
@checks.mod_or_permissions()
@channelfilter.command(name="list", pass_context=True, no_pm=True)
async def channelfilter_list(self, ctx):
"""Words filtered in channel."""
server = ctx.message.server
channel = ctx.message.channel
channel_settings = self.get_channel_settings(server, channel)
if len(channel_settings.keys()) == 0:
await self.bot.say("No words are filtered here.")
return
await self.bot.say(", ".join(channel_settings.keys()))
@checks.mod_or_permissions()
@channelfilter.command(name="listserver", pass_context=True, no_pm=True)
async def channelfilter_listserver(self, ctx):
"""Words filtered on server."""
server = ctx.message.server
server_settings = self.get_server_settings(server)
out = []
for channel_id in server_settings:
channel = self.bot.get_channel(channel_id)
channel_settings = self.get_channel_settings(server, channel)
if len(channel_settings):
out.append("{}: {}".format(channel.mention, ", ".join(channel_settings)))
if not len(out):
await self.bot.say("Nothing is filtered on this server.")
return
await self.bot.say(", ".join(out))
async def on_message(self, message):
"""Filter words by channel."""
server = message.server
channel = message.channel
author = message.author
if server is None or self.bot.user == author:
return
valid_user = isinstance(author, discord.Member) and not author.bot
# Ignore bots
if not valid_user:
return
# Ignore people with manage messages perms
if author.server_permissions.manage_messages:
return
channel_settings = self.get_channel_settings(server, channel)
if not isinstance(channel_settings, dict):
return
for word in channel_settings.keys():
if word.lower() in message.content.lower():
reason = channel_settings[word].get('reason', 'that')
await self.bot.send_message(
channel,
"{} {}. "
"Repeat offenders will be kicked/banned.".format(
author.mention,
reason
))
await self.bot.delete_message(message)
def check_folder():
"""Check folder."""
os.makedirs(PATH, exist_ok=True)
def check_file():
"""Check files."""
if not dataIO.is_valid_json(JSON):
dataIO.save_json(JSON, {})
def setup(bot):
"""Setup."""
check_folder()
check_file()
n = ChannelFilter(bot)
bot.add_cog(n)
| [
"smlbiobot@gmail.com"
] | smlbiobot@gmail.com |
d30fbfbfc9bc1ad2db4327b992818ba0277297e0 | 178eeebc29c3b5501505d1508f52f52c7d62ffdc | /Code/problem17.py | 7eba0e3006b02503fb71a59a6fa7fa104258c31a | [] | no_license | krishnakalyan3/BSG | 926bcb312974943478107c9a15b47dce737726ca | 68f9a853ae803e8d2943bcb8ec31842acbdc2813 | refs/heads/master | 2021-01-11T19:58:35.152345 | 2017-01-19T09:05:28 | 2017-01-19T09:05:28 | 79,435,188 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 379 | py | #!/usr/bin/env python2
# Counting Point Mutations
def hamming(x,y):
count = 0
for x1, y1 in zip(x, y):
if (x1 != y1):
count += 1
return count
if __name__ == '__main__':
dataset = open("/Users/krishna/Downloads/rosalind_hamm.txt").read()
#dataset = 'GAGCCTACTAACGGGAT\nCATCGTAATGACGGCCT'
split_data = dataset.split('\n')
print hamming(split_data[0], split_data[1]) | [
"krishnakalyan3@gmail.com"
] | krishnakalyan3@gmail.com |
0ac29342f999a38d4b563cd8fac442fcbc10d64d | 8ebf6311c3c1db40c7bb56051cf4e37e1b85a4f9 | /rm-server/gateway/gateway/router/projectmanager/project/user/list.py | 32c198c00b991b4d2b88403cc7cb22d51600f615 | [] | no_license | sq591442679/requirements-manager | e8b074afb7fd2a83632f2546d392dab4c35aeeeb | 6d664ce338b455150dcc9a86145967e8dd67a9dd | refs/heads/master | 2023-07-08T04:38:20.064019 | 2021-08-11T03:41:13 | 2021-08-11T03:41:13 | 392,877,568 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 540 | py | from flask import request
from gateway.app import app
from gateway.http_client import projectmanager_http_client
from gateway.utils.handle_api import (
get_client_username, handle_request_response
)
@app.route('/project/user/list', methods=['GET'])
@handle_request_response
@get_client_username
def project_user_list(client_username: str):
args = request.args.to_dict()
status_code, resp_body = projectmanager_http_client.get(
'project/user/list', client_username, params=args
)
return status_code, resp_body
| [
"591442679@qq.com"
] | 591442679@qq.com |
270cd817464fdfc04fa940e1f3291e2453365dc8 | 42b920d39f6fa79b1b6f8805c788599122e353db | /transcode movie/TV Show/encryption/new.py | 7eb5cec57b311ef1d36c18f5cf2713e0a0de5f02 | [] | no_license | analyticsbot/Python-Code---Part-10 | 62a167beb4326824429edf50f2004256103ef9d6 | 979853c419ed1073b046c65a095131e1e3a2769e | refs/heads/master | 2021-06-05T18:22:30.696083 | 2016-08-27T02:42:10 | 2016-08-27T02:42:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 397 | py | from Crypto.Cipher import AES
from Crypto.Util import Counter
key = '7842f0a1ebc38f44e3e0c81943f68582'.decode('hex')
iv = '7842f0a1ebc38f44'.decode('hex')
ctr_e = Counter.new(64, prefix=iv, initial_value=0)
encryptor = AES.new(key, AES.MODE_CBC, counter=ctr_e)
with open('Grass.out.jpg', 'wb') as fout:
with open('Grass.jpg', 'rb') as fin:
fout.write(encryptor.encrypt(fin.read()))
| [
"ravi.shankar1788@gmail.com"
] | ravi.shankar1788@gmail.com |
fcf79f0af457759532d20061e773bff084cc534c | 4c2c1775b6b319ae07155f46e70a6726ab0980c2 | /algo/algo_code/naga-algo/naga_interactive/script/tools/email_sender.py | 84d8d05ac3c42ef9c45cc00249239fb07efb8a8d | [] | no_license | kiminh/util | 8e4b204849a57941120e37c9330772f03c8892d0 | 763a71031d9c0ef207b87dc03ebc55208a2dd5ad | refs/heads/master | 2022-06-09T06:09:13.221754 | 2020-04-27T04:23:00 | 2020-04-27T04:23:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,747 | py | import email
import email.encoders
import email.mime.base
import email.mime.text
import logging
import smtplib
import time
import sys
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import COMMASPACE
MAIL_CONFIG = {
"SENDER": 'noreply.ads.monitor@cootek.cn',
'SMTP_SERVER': "smtp.partner.outlook.cn",
"USER_NAME": "noreply.ads.monitor@cootek.cn",
"PASSWORD": "ApdAll666",
"EMAIL_PORT": "587",
"EMAIL_RECEIVER": ["ling.fang@cootek.cn"]
}
class MailSender(object):
""" MysqlUtilHandler """
def __init__(self, smtp_server=MAIL_CONFIG["SMTP_SERVER"], port=MAIL_CONFIG["EMAIL_PORT"],
user=MAIL_CONFIG["USER_NAME"], pwd=MAIL_CONFIG["PASSWORD"],
sender=MAIL_CONFIG["SENDER"], receiver_list=MAIL_CONFIG["EMAIL_RECEIVER"]):
self.smtp_server = smtp_server
self.port = port
self.user = user
self.pwd = pwd
self.sender = sender
self.receiver_list = receiver_list
self.smtp = None
def init(self):
""" init """
self.smtp = smtplib.SMTP(timeout=70)
self.smtp.connect(self.smtp_server, self.port)
self.smtp.starttls()
self.smtp.set_debuglevel(0)
def send_email(self, subject, msg, file_names=[], prefix=''):
""" send_email """
msg_root = MIMEMultipart('related')
msg_root['Subject'] = subject
msg_root['To'] = COMMASPACE.join(self.receiver_list)
msg_text = MIMEText('%s' % msg, 'html', 'utf-8')
msg_root.attach(msg_text)
for file_name in file_names:
suffix = file_name
file_name = prefix + file_name
fp = open(file_name, 'rb')
file1 = email.mime.base.MIMEBase('application', 'vnd.ms-excel')
file1.set_payload(fp.read())
fp.close()
email.encoders.encode_base64(file1)
str1 = 'attachment;filename=' + suffix
file1.add_header('Content-Disposition', str1)
msg_root.attach(file1)
while True:
try:
self.smtp.login(self.user, self.pwd)
self.smtp.sendmail(self.sender, self.receiver_list, msg_root.as_string())
break
except Exception as e:
print(e)
try:
time.sleep(20)
self.smtp.connect()
except Exception as e:
logging.error("failed to login to smtp server, e: %s" % str(e))
if __name__ == "__main__":
subject = sys.argv[1]
text_body = sys.argv[2]
mailsender = MailSender()
mailsender.init()
mailsender.send_email(subject=subject, msg=text_body)
| [
"ling@lingdeMacBook-Air.local"
] | ling@lingdeMacBook-Air.local |
bf517a269f1e1e2e3035381c5d0e2b41f3f1e470 | 82195c2a1fce4ec92bc843c815bf06f7bd9c782a | /test/functional/bitcoin_cli.py | 3ae91a61f6f43ec06248773e17ab4b3a8d1b25dd | [
"MIT"
] | permissive | cashgoldcoin/cashgoldcoin | 9ca2947ff000451182478afeb0726ebd07cdc274 | ec774b51a09379d2b9d16aaca6edc7e3661a64ab | refs/heads/master | 2020-04-06T07:21:11.480655 | 2018-11-12T20:55:19 | 2018-11-12T20:55:19 | 155,448,765 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 833 | py | #!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test bitcoin-cli"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class TestBitcoinCli(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
"""Main test logic"""
self.log.info("Compare responses from getinfo RPC and `cashgoldcoin-cli getinfo`")
cli_get_info = self.nodes[0].cli.getinfo()
rpc_get_info = self.nodes[0].getinfo()
assert_equal(cli_get_info, rpc_get_info)
if __name__ == '__main__':
TestBitcoinCli().main()
| [
"you@example.com"
] | you@example.com |
604d6e7c5b0459a745e960c8955855b1864e6b09 | aa57b25d0b040dc339ef395d5d17bfdf3723f825 | /Tools/DGLPyTorch/SyntheticGraphGeneration/syngen/utils/types/dataframe_type.py | 9446d7ce4cf8ad2f1233d90193f2127293336a5f | [] | no_license | nv-kkudrynski/DeepLearningExamples | 29f9be9e6560e47f19e1cfc9f27d68b383b51b97 | c7400425ff63d4bce3a2207ff6da6e4d3365f9ba | refs/heads/master | 2023-04-29T14:11:35.498962 | 2023-04-18T17:04:45 | 2023-04-18T17:04:45 | 186,789,312 | 2 | 0 | null | 2023-04-18T16:50:23 | 2019-05-15T09:01:45 | Jupyter Notebook | UTF-8 | Python | false | false | 814 | py | # Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Union
import cudf
import dask
import dask_cudf
import pandas
DataFrameType = Union[
cudf.DataFrame,
dask_cudf.DataFrame,
pandas.DataFrame,
dask.dataframe.DataFrame,
]
| [
"kkudrynski@nvidia.com"
] | kkudrynski@nvidia.com |
2d8ae82ee205c9f5daffdedda2ad91f75f17f1a9 | 9d454ae0d5dd1d7e96e904ced80ca502019bb659 | /1588_sumOddLengthSubarrays.py | 8cb3fb3c870e0f20cfe1e03d33631e56dd2254c2 | [] | no_license | zzz686970/leetcode-2018 | dad2c3db3b6360662a90ea709e58d7facec5c797 | 16e4343922041929bc3021e152093425066620bb | refs/heads/master | 2021-08-18T08:11:10.153394 | 2021-07-22T15:58:52 | 2021-07-22T15:58:52 | 135,581,395 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296 | py | class Solution:
def sumOddLengthSubarrays(self, arr: List[int]) -> int:
length = len(arr)
res = 0
for i, val in enumerate(arr, 1):
res += (i * (length - i + 1) + 1) // 2 * val
return res
| [
"1564256031@qq.com"
] | 1564256031@qq.com |
77527a553d29938346edbcc692edd89fda02de2e | 77311ad9622a7d8b88707d7cee3f44de7c8860cb | /res/scripts/common/goodies/goodie_helpers.py | 2ef1ee6c219569bdef95cd2ff114b915d5135da2 | [] | no_license | webiumsk/WOT-0.9.14-CT | 9b193191505a4560df4e872e022eebf59308057e | cfe0b03e511d02c36ce185f308eb48f13ecc05ca | refs/heads/master | 2021-01-10T02:14:10.830715 | 2016-02-14T11:59:59 | 2016-02-14T11:59:59 | 51,606,676 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 4,542 | py | # 2016.02.14 12:44:31 Střední Evropa (běžný čas)
# Embedded file name: scripts/common/goodies/goodie_helpers.py
from collections import namedtuple
from debug_utils import LOG_ERROR
from Goodies import GoodieException
from GoodieConditions import MaxVehicleLevel
from GoodieDefinition import GoodieDefinition
from GoodieResources import Gold, Credits, Experience, CrewExperience, FreeExperience
from GoodieTargets import BuyPremiumAccount, BuySlot, PostBattle, BuyGoldTankmen, FreeExperienceConversion
from GoodieValue import GoodieValue
from goodie_constants import GOODIE_TARGET_TYPE, GOODIE_CONDITION_TYPE, GOODIE_RESOURCE_TYPE
GoodieData = namedtuple('GoodieData', 'variety target enabled lifetime useby limit autostart condition resource')
_CONDITIONS = {GOODIE_CONDITION_TYPE.MAX_VEHICLE_LEVEL: MaxVehicleLevel}
_TARGETS = {GOODIE_TARGET_TYPE.ON_BUY_PREMIUM: BuyPremiumAccount,
GOODIE_TARGET_TYPE.ON_BUY_SLOT: BuySlot,
GOODIE_TARGET_TYPE.ON_POST_BATTLE: PostBattle,
GOODIE_TARGET_TYPE.ON_BUY_GOLD_TANKMEN: BuyGoldTankmen,
GOODIE_TARGET_TYPE.ON_FREE_XP_CONVERSION: FreeExperienceConversion}
_RESOURCES = {GOODIE_RESOURCE_TYPE.GOLD: Gold,
GOODIE_RESOURCE_TYPE.CREDITS: Credits,
GOODIE_RESOURCE_TYPE.XP: Experience,
GOODIE_RESOURCE_TYPE.CREW_XP: CrewExperience,
GOODIE_RESOURCE_TYPE.FREE_XP: FreeExperience}
GOODIE_CONDITION_TO_TEXT = {MaxVehicleLevel: 'max_vehicle_level'}
GOODIE_RESOURCE_TO_TEXT = {Gold: 'gold',
Credits: 'credits',
Experience: 'experience',
CrewExperience: 'crew_experience',
FreeExperience: 'free_experience'}
GOODIE_TARGET_TO_TEXT = {BuyPremiumAccount: 'premium',
BuySlot: 'slot',
PostBattle: 'post_battle',
BuyGoldTankmen: 'gold_tankmen',
FreeExperienceConversion: 'free_xp_conversion'}
GOODIE_TEXT_TO_CONDITION = {'max_vehicle_level': GOODIE_CONDITION_TYPE.MAX_VEHICLE_LEVEL}
GOODIE_TEXT_TO_RESOURCE = {'credits': GOODIE_RESOURCE_TYPE.CREDITS,
'experience': GOODIE_RESOURCE_TYPE.XP,
'crew_experience': GOODIE_RESOURCE_TYPE.CREW_XP,
'free_experience': GOODIE_RESOURCE_TYPE.FREE_XP,
'gold': GOODIE_RESOURCE_TYPE.GOLD}
GOODIE_TEXT_TO_TARGET = {'premium': GOODIE_TARGET_TYPE.ON_BUY_PREMIUM,
'slot': GOODIE_TARGET_TYPE.ON_BUY_SLOT,
'post_battle': GOODIE_TARGET_TYPE.ON_POST_BATTLE,
'gold_tankmen': GOODIE_TARGET_TYPE.ON_BUY_GOLD_TANKMEN,
'free_xp_conversion': GOODIE_TARGET_TYPE.ON_FREE_XP_CONVERSION}
class NamedGoodieData(GoodieData):
def getTargetValue(self):
if self.target[0] == GOODIE_TARGET_TYPE.ON_BUY_PREMIUM:
return int(self.target[1].split('_')[1])
else:
return self.target[1]
@property
def targetID(self):
return self.target[0]
def loadDefinitions(d):
goodies = {}
for uid, d in d.iteritems():
v_variety, v_target, v_enabled, v_lifetime, v_useby, v_limit, v_autostart, v_condition, v_resource = d
if v_condition is not None:
condition = _CONDITIONS.get(v_condition[0])(v_condition[1])
else:
condition = None
target = _TARGETS[v_target[0]](v_target[1], v_target[2])
resource = _RESOURCES[v_resource[0]]
if v_resource[2]:
value = GoodieValue.percent(v_resource[1])
else:
value = GoodieValue.absolute(v_resource[1])
goodies[uid] = GoodieDefinition(uid=uid, variety=v_variety, target=target, enabled=v_enabled, lifetime=v_lifetime, useby=v_useby, counter=v_limit, autostart=v_autostart, resource=resource, value=value, condition=condition)
return goodies
def getPriceWithDiscount(price, value):
if value[2]:
result = int(price - price * (value[1] / float(100)))
if result < 0:
return 0
else:
return result
else:
return price - value[1]
def getPremiumCost(premiumCosts, goodie):
if goodie.target[0] == GOODIE_TARGET_TYPE.ON_BUY_PREMIUM:
price = premiumCosts.get(goodie.getTargetValue(), None)
if price is None:
return
return getPriceWithDiscount(price, goodie.resource)
else:
return
def loadPdata(pdataGoodies, goodies, logID):
for uid, goodie in pdataGoodies.iteritems():
try:
goodies.load(uid, goodie[0], goodie[1], goodie[2])
except GoodieException as detail:
LOG_ERROR('Cannot load a goodie', detail, logID)
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\common\goodies\goodie_helpers.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.02.14 12:44:31 Střední Evropa (běžný čas)
| [
"info@webium.sk"
] | info@webium.sk |
0cbce35c5daef0797c0b72a8d1e35e41e8716bc0 | da478f410908814c9be8044696dd2077889e3f82 | /0x1F-pascal_triangle/0-pascal_triangle.py | cc48432904bdc874fddaec8afd20dfd1008d73f2 | [] | no_license | sonnentag/holbertonschool-interview | 5c1c454cfe2e82b7bf4ba02d5aa19b5b5738d4a7 | d2d2f3159453a9c879cb1f8f205be504f53c4cae | refs/heads/main | 2023-07-18T09:06:19.653685 | 2021-08-26T16:19:11 | 2021-08-26T16:19:11 | 320,453,810 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 407 | py | #!/usr/bin/python3
""" 0x1F-pascal_triangle
"""
def pascal_triangle(n):
''' generate pascal's triangle of n levels deep
'''
triangle = []
if n <= 0:
return triangle
for x in range(1, n + 1):
row = []
col = 1
for k in range(1, x + 1):
row.append(col)
col = int(col * (x - k) / k)
triangle.append(row)
return triangle
| [
"zocle@zocle.net"
] | zocle@zocle.net |
2f7f638e66f5e1b63eee06075a10ade093af2e5d | b7f3edb5b7c62174bed808079c3b21fb9ea51d52 | /third_party/blink/web_tests/external/wpt/bluetooth/generate.py | f7fffddbc301f3e555f75ad4cb2d067430628925 | [
"LGPL-2.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-1.0-or-later",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft",
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] | permissive | otcshare/chromium-src | 26a7372773b53b236784c51677c566dc0ad839e4 | 64bee65c921db7e78e25d08f1e98da2668b57be5 | refs/heads/webml | 2023-03-21T03:20:15.377034 | 2020-11-16T01:40:14 | 2020-11-16T01:40:14 | 209,262,645 | 18 | 21 | BSD-3-Clause | 2023-03-23T06:20:07 | 2019-09-18T08:52:07 | null | UTF-8 | Python | false | false | 7,189 | py | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# TODO(509038): Delete the file in LayoutTests/bluetooth after all the script
# tests have been migrated to this directory.
"""Generator script for Web Bluetooth LayoutTests.
For each script-tests/X.js creates the following test files depending on the
contents of X.js
- getPrimaryService/X.https.window.js
- getPrimaryServices/X.https.window.js
- getPrimaryServices/X-with-uuid.https.window.js
script-tests/X.js files should contain "CALLS([variation1 | variation2 | ...])"
tokens that indicate what files to generate. Each variation in CALLS([...])
should corresponds to a js function call and its arguments. Additionally a
variation can end in [UUID] to indicate that the generated file's name should
have the -with-uuid suffix.
The PREVIOUS_CALL token will be replaced with the function that replaced CALLS.
The FUNCTION_NAME token will be replaced with the name of the function that
replaced CALLS.
For example, for the following template file:
// script-tests/example.js
promise_test(() => {
return navigator.bluetooth.requestDevice(...)
.then(device => device.gatt.CALLS([
getPrimaryService('heart_rate')|
getPrimaryServices('heart_rate')[UUID]]))
.then(device => device.gatt.PREVIOUS_CALL);
}, 'example test for FUNCTION_NAME');
this script will generate:
// getPrimaryService/example.https.window.js
promise_test(() => {
return navigator.bluetooth.requestDevice(...)
.then(device => device.gatt.getPrimaryService('heart_rate'))
.then(device => device.gatt.getPrimaryService('heart_rate'));
}, 'example test for getPrimaryService');
// getPrimaryServices/example-with-uuid.https.window.js
promise_test(() => {
return navigator.bluetooth.requestDevice(...)
.then(device => device.gatt.getPrimaryServices('heart_rate'))
.then(device => device.gatt.getPrimaryServices('heart_rate'));
}, 'example test for getPrimaryServices');
Run
$ python //third_party/WebKit/LayoutTests/bluetooth/generate.py
and commit the generated files.
"""
import fnmatch
import os
import re
import sys
import logging
TEMPLATES_DIR = 'script-tests'
class GeneratedTest:
def __init__(self, data, path, template):
self.data = data
self.path = path
self.template = template
def GetGeneratedTests():
"""Yields a GeneratedTest for each call in templates in script-tests."""
bluetooth_tests_dir = os.path.dirname(os.path.realpath(__file__))
# Read Base Test Template.
base_template_file_handle = open(
os.path.join(
bluetooth_tests_dir,
TEMPLATES_DIR,
'base_test_js.template'
), 'r')
base_template_file_data = base_template_file_handle.read().decode('utf-8')
base_template_file_handle.close()
# Get Templates.
template_path = os.path.join(bluetooth_tests_dir, TEMPLATES_DIR)
available_templates = []
for root, _, files in os.walk(template_path):
for template in files:
if template.endswith('.js'):
available_templates.append(os.path.join(root, template))
# Generate Test Files
for template in available_templates:
# Read template
template_file_handle = open(template, 'r')
template_file_data = template_file_handle.read().decode('utf-8')
template_file_handle.close()
template_name = os.path.splitext(os.path.basename(template))[0]
# Find function names in multiline pattern: CALLS( [ function_name,function_name2[UUID] ])
result = re.search(
r'CALLS\(' + # CALLS(
r'[^\[]*' + # Any characters not [, allowing for new lines.
r'\[' + # [
r'(.*?)' + # group matching: function_name(), function_name2[UUID]
r'\]\)', # adjacent closing characters: ])
template_file_data, re.MULTILINE | re.DOTALL)
if result is None:
raise Exception('Template must contain \'CALLS\' tokens')
new_test_file_data = base_template_file_data.replace('TEST',
template_file_data)
# Replace CALLS([...]) with CALLS so that we don't have to replace the
# CALLS([...]) for every new test file.
new_test_file_data = new_test_file_data.replace(result.group(), 'CALLS')
# Replace 'PREVIOUS_CALL' with 'CALLS' so that we can replace it while
# replacing CALLS.
new_test_file_data = new_test_file_data.replace('PREVIOUS_CALL', 'CALLS')
for call in result.group(1).split('|'):
# Parse call
call = call.strip()
function_name, args, uuid_suffix = re.search(r'(.*?)\((.*)\)(\[UUID\])?', call).groups()
# Replace template tokens
call_test_file_data = new_test_file_data
call_test_file_data = call_test_file_data.replace('CALLS', '{}({})'.format(function_name, args))
call_test_file_data = call_test_file_data.replace('FUNCTION_NAME', function_name)
# Get test file name
group_dir = os.path.basename(os.path.abspath(os.path.join(template, os.pardir)))
call_test_file_name = 'gen-{}{}.https.window.js'.format(template_name, '-with-uuid' if uuid_suffix else '')
call_test_file_path = os.path.join(bluetooth_tests_dir, group_dir, function_name, call_test_file_name)
yield GeneratedTest(call_test_file_data, call_test_file_path, template)
def main():
logging.basicConfig(level=logging.INFO)
previous_generated_files = set()
current_path = os.path.dirname(os.path.realpath(__file__))
for root, _, filenames in os.walk(current_path):
for filename in fnmatch.filter(filenames, 'gen-*.https.window.js'):
previous_generated_files.add(os.path.join(root, filename))
generated_files = set()
for generated_test in GetGeneratedTests():
prev_len = len(generated_files)
generated_files.add(generated_test.path)
if prev_len == len(generated_files):
logging.info('Generated the same test twice for template:\n%s',
generated_test.template)
# Create or open test file
directory = os.path.dirname(generated_test.path)
if not os.path.exists(directory):
os.makedirs(directory)
test_file_handle = open(generated_test.path, 'wb')
# Write contents
test_file_handle.write(generated_test.data.encode('utf-8'))
test_file_handle.close()
new_generated_files = generated_files - previous_generated_files
if len(new_generated_files) != 0:
logging.info('Newly generated tests:')
for generated_file in new_generated_files:
logging.info(generated_file)
obsolete_files = previous_generated_files - generated_files
if len(obsolete_files) != 0:
logging.warning('The following files might be obsolete:')
for generated_file in obsolete_files:
logging.warning(generated_file)
if __name__ == '__main__':
sys.exit(main())
| [
"commit-bot@chromium.org"
] | commit-bot@chromium.org |
54617dd96ce0cb421893e586c7a3aa26247754de | 3fbbdc377c84b974bb488ceea1f9a146fd90fdbb | /clean_test.py | 100205c6de74b79851f71a4030893b330d6d9a42 | [] | no_license | sxtech/SX-UrlImgPackage | ee7bf21bf02a1062218de5398926771e7ba7b722 | 8c9d95d4c57825a1db2df5f5394cba8dfdb38286 | refs/heads/master | 2021-01-15T10:05:39.772962 | 2016-08-18T07:22:00 | 2016-08-18T07:22:00 | 33,332,624 | 1 | 0 | null | 2016-08-18T07:22:00 | 2015-04-02T21:25:38 | Python | UTF-8 | Python | false | false | 256 | py | import time
from img_package import app
from img_package.clean_worker import CleanWorker
def test_clean_worker():
cw = CleanWorker()
cw.main()
time.sleep(10)
app.config['IS_QUIT']
if __name__ == "__main__":
test_clean_worker()
| [
"smellycat2014@foxmail.com"
] | smellycat2014@foxmail.com |
e3d180db6d65983092e1957c5bab49d204219983 | 68e0a8fbfc5bcbcd2ceaca07e420b1d4ca8d02c1 | /src/brewlog/utils/views.py | fea936bcd0ee35c3956264e976df32d0dd86d966 | [
"BSD-3-Clause"
] | permissive | zgoda/brewlog | 391e710a63b8bd1c753caee7cf56dc1e0780fcfd | cbf9d7b14f0cdfd9241ae869cb5f28c9a1e817b4 | refs/heads/master | 2022-02-21T15:39:46.778749 | 2022-02-07T11:51:07 | 2022-02-07T11:51:07 | 95,431,146 | 3 | 0 | NOASSERTION | 2020-01-14T20:42:03 | 2017-06-26T09:33:18 | Python | UTF-8 | Python | false | false | 3,413 | py | import collections
from typing import Optional
from urllib.parse import urljoin, urlparse
from flask import abort, request, session, url_for
from flask_babel import lazy_gettext as _
from itsdangerous.exc import BadSignature, SignatureExpired
from itsdangerous.url_safe import URLSafeTimedSerializer
from permission import Permission, Rule
def next_redirect(fallback_endpoint: str, *args, **kwargs) -> str:
"""Find redirect url. The order of search is request params, session and
finally url for fallback endpoint is returned if none found. Args and
kwargs are passed intact to endpoint.
:param fallback_endpoint: full endpoint specification
:type fallback_endpoint: str
:return: HTTP path to redirect to
:rtype: str
"""
for c in [request.args.get('next'), session.pop('next', None)]:
if is_redirect_safe(c):
return c
return url_for(fallback_endpoint, *args, **kwargs)
def is_redirect_safe(target: Optional[str]) -> bool:
"""Check if redirect is safe, that is using HTTP protocol and is pointing
to the same site.
:param target: redirect target url
:type target: str
:return: flag signalling whether redirect is safe
:rtype: bool
"""
if not target:
return False
ref_url = urlparse(request.host_url)
test_url = urlparse(urljoin(request.host_url, target))
return test_url.scheme in ('http', 'https') and ref_url.netloc == test_url.netloc
class RuleBase(Rule):
def __init__(self, obj):
self.obj = obj
super().__init__()
class PublicAccessRuleBase(RuleBase):
def deny(self):
abort(404)
class OwnerAccessRuleBase(RuleBase):
def deny(self):
abort(403)
class PermissionBase(Permission):
rule_class = None
def __init__(self, obj):
self.obj = obj
super().__init__()
def rule(self):
return self.rule_class(self.obj)
class AccessManagerBase:
primary = None
secondary = None
def __init__(self, obj, secondary_condition):
self.obj = obj
self.perms = []
if self.primary:
self.perms.append(self.primary(obj))
if self.secondary and secondary_condition:
self.perms.append(self.secondary(obj))
def check(self):
for perm in self.perms:
if not perm.check():
perm.deny()
TokenCheckResult = collections.namedtuple(
'TokenCheckResult', ['is_error', 'message', 'payload']
)
def check_token(token: str, secret: str, max_age: int) -> TokenCheckResult:
"""Check token validity, returns validation result with payload if token
is valid.
:param token: token to check
:type token: str
:param secret: secret that was used to generate token
:type secret: str
:param max_age: max age of token
:type max_age: int
:return: validation result
:rtype: TokenCheckResult
"""
serializer = URLSafeTimedSerializer(secret)
payload = None
is_error = True
msg = None
try:
payload = serializer.loads(token, max_age=max_age)
is_error = False
except SignatureExpired as e:
msg = _(
"token expired, it's valid for 48 hrs and it was generated on %(date)s",
date=e.date_signed,
)
except BadSignature:
msg = _('invalid token')
return TokenCheckResult(is_error, message=msg, payload=payload)
| [
"jarek.zgoda@gmail.com"
] | jarek.zgoda@gmail.com |
02eacc35cfb41f8410152f22332522879c2551a3 | b1b7206d4c8fb878e47bd2cd3a4e6b2a1f94cdc4 | /hw3/code/tip3/test_script.py | ad3345cf10406447eaabe3762601d306f5da5eb0 | [] | no_license | vachelch/MLDS | a6752e2e568841841389e50e7750d658e2157c29 | e4fcaeae3b8a63aaefbf07b9ca0c107e9601ad31 | refs/heads/master | 2020-05-04T23:08:00.507062 | 2019-04-04T16:56:23 | 2019-04-04T16:56:23 | 179,533,690 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 490 | py | from dataset import Dataset
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
dataset = Dataset()
np.random.seed(0)
r, c = 5, 5
dataset.data_generator(64)
true_imgs = dataset.next_batch()
true_imgs = true_imgs.transpose(0, 2, 3, 1)
fig, axs = plt.subplots(r, c)
cnt = 0
for i in range(r):
for j in range(c):
axs[i,j].imshow(true_imgs[cnt, :,:,:])
axs[i,j].axis('off')
cnt += 1
fig.savefig('log/test.png')
plt.close() | [
"r06944043@ntu.edu.tw"
] | r06944043@ntu.edu.tw |
84539c92ba6c5c35f7ab4a5a00e5a18ac93e262a | 48e124e97cc776feb0ad6d17b9ef1dfa24e2e474 | /sdk/python/pulumi_azure_native/datamigration/v20171115preview/project.py | 21a6bd7bee26c2b1642ee6b69c5fc5ceafc3190b | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | bpkgoud/pulumi-azure-native | 0817502630062efbc35134410c4a784b61a4736d | a3215fe1b87fba69294f248017b1591767c2b96c | refs/heads/master | 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,518 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['ProjectArgs', 'Project']
@pulumi.input_type
class ProjectArgs:
def __init__(__self__, *,
group_name: pulumi.Input[str],
service_name: pulumi.Input[str],
source_platform: pulumi.Input['ProjectSourcePlatform'],
target_platform: pulumi.Input['ProjectTargetPlatform'],
databases_info: Optional[pulumi.Input[Sequence[pulumi.Input['DatabaseInfoArgs']]]] = None,
location: Optional[pulumi.Input[str]] = None,
project_name: Optional[pulumi.Input[str]] = None,
source_connection_info: Optional[pulumi.Input['SqlConnectionInfoArgs']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
target_connection_info: Optional[pulumi.Input['SqlConnectionInfoArgs']] = None):
"""
The set of arguments for constructing a Project resource.
:param pulumi.Input[str] group_name: Name of the resource group
:param pulumi.Input[str] service_name: Name of the service
:param pulumi.Input['ProjectSourcePlatform'] source_platform: Source platform for the project
:param pulumi.Input['ProjectTargetPlatform'] target_platform: Target platform for the project
:param pulumi.Input[Sequence[pulumi.Input['DatabaseInfoArgs']]] databases_info: List of DatabaseInfo
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] project_name: Name of the project
:param pulumi.Input['SqlConnectionInfoArgs'] source_connection_info: Information for connecting to source
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input['SqlConnectionInfoArgs'] target_connection_info: Information for connecting to target
"""
pulumi.set(__self__, "group_name", group_name)
pulumi.set(__self__, "service_name", service_name)
pulumi.set(__self__, "source_platform", source_platform)
pulumi.set(__self__, "target_platform", target_platform)
if databases_info is not None:
pulumi.set(__self__, "databases_info", databases_info)
if location is not None:
pulumi.set(__self__, "location", location)
if project_name is not None:
pulumi.set(__self__, "project_name", project_name)
if source_connection_info is not None:
pulumi.set(__self__, "source_connection_info", source_connection_info)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if target_connection_info is not None:
pulumi.set(__self__, "target_connection_info", target_connection_info)
@property
@pulumi.getter(name="groupName")
def group_name(self) -> pulumi.Input[str]:
"""
Name of the resource group
"""
return pulumi.get(self, "group_name")
@group_name.setter
def group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "group_name", value)
@property
@pulumi.getter(name="serviceName")
def service_name(self) -> pulumi.Input[str]:
"""
Name of the service
"""
return pulumi.get(self, "service_name")
@service_name.setter
def service_name(self, value: pulumi.Input[str]):
pulumi.set(self, "service_name", value)
@property
@pulumi.getter(name="sourcePlatform")
def source_platform(self) -> pulumi.Input['ProjectSourcePlatform']:
"""
Source platform for the project
"""
return pulumi.get(self, "source_platform")
@source_platform.setter
def source_platform(self, value: pulumi.Input['ProjectSourcePlatform']):
pulumi.set(self, "source_platform", value)
@property
@pulumi.getter(name="targetPlatform")
def target_platform(self) -> pulumi.Input['ProjectTargetPlatform']:
"""
Target platform for the project
"""
return pulumi.get(self, "target_platform")
@target_platform.setter
def target_platform(self, value: pulumi.Input['ProjectTargetPlatform']):
pulumi.set(self, "target_platform", value)
@property
@pulumi.getter(name="databasesInfo")
def databases_info(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DatabaseInfoArgs']]]]:
"""
List of DatabaseInfo
"""
return pulumi.get(self, "databases_info")
@databases_info.setter
def databases_info(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DatabaseInfoArgs']]]]):
pulumi.set(self, "databases_info", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="projectName")
def project_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the project
"""
return pulumi.get(self, "project_name")
@project_name.setter
def project_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project_name", value)
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> Optional[pulumi.Input['SqlConnectionInfoArgs']]:
"""
Information for connecting to source
"""
return pulumi.get(self, "source_connection_info")
@source_connection_info.setter
def source_connection_info(self, value: Optional[pulumi.Input['SqlConnectionInfoArgs']]):
pulumi.set(self, "source_connection_info", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> Optional[pulumi.Input['SqlConnectionInfoArgs']]:
"""
Information for connecting to target
"""
return pulumi.get(self, "target_connection_info")
@target_connection_info.setter
def target_connection_info(self, value: Optional[pulumi.Input['SqlConnectionInfoArgs']]):
pulumi.set(self, "target_connection_info", value)
class Project(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
databases_info: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DatabaseInfoArgs']]]]] = None,
group_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
project_name: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
source_connection_info: Optional[pulumi.Input[pulumi.InputType['SqlConnectionInfoArgs']]] = None,
source_platform: Optional[pulumi.Input['ProjectSourcePlatform']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
target_connection_info: Optional[pulumi.Input[pulumi.InputType['SqlConnectionInfoArgs']]] = None,
target_platform: Optional[pulumi.Input['ProjectTargetPlatform']] = None,
__props__=None):
"""
A project resource
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DatabaseInfoArgs']]]] databases_info: List of DatabaseInfo
:param pulumi.Input[str] group_name: Name of the resource group
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[str] project_name: Name of the project
:param pulumi.Input[str] service_name: Name of the service
:param pulumi.Input[pulumi.InputType['SqlConnectionInfoArgs']] source_connection_info: Information for connecting to source
:param pulumi.Input['ProjectSourcePlatform'] source_platform: Source platform for the project
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input[pulumi.InputType['SqlConnectionInfoArgs']] target_connection_info: Information for connecting to target
:param pulumi.Input['ProjectTargetPlatform'] target_platform: Target platform for the project
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ProjectArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
A project resource
:param str resource_name: The name of the resource.
:param ProjectArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ProjectArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
databases_info: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DatabaseInfoArgs']]]]] = None,
group_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
project_name: Optional[pulumi.Input[str]] = None,
service_name: Optional[pulumi.Input[str]] = None,
source_connection_info: Optional[pulumi.Input[pulumi.InputType['SqlConnectionInfoArgs']]] = None,
source_platform: Optional[pulumi.Input['ProjectSourcePlatform']] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
target_connection_info: Optional[pulumi.Input[pulumi.InputType['SqlConnectionInfoArgs']]] = None,
target_platform: Optional[pulumi.Input['ProjectTargetPlatform']] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ProjectArgs.__new__(ProjectArgs)
__props__.__dict__["databases_info"] = databases_info
if group_name is None and not opts.urn:
raise TypeError("Missing required property 'group_name'")
__props__.__dict__["group_name"] = group_name
__props__.__dict__["location"] = location
__props__.__dict__["project_name"] = project_name
if service_name is None and not opts.urn:
raise TypeError("Missing required property 'service_name'")
__props__.__dict__["service_name"] = service_name
__props__.__dict__["source_connection_info"] = source_connection_info
if source_platform is None and not opts.urn:
raise TypeError("Missing required property 'source_platform'")
__props__.__dict__["source_platform"] = source_platform
__props__.__dict__["tags"] = tags
__props__.__dict__["target_connection_info"] = target_connection_info
if target_platform is None and not opts.urn:
raise TypeError("Missing required property 'target_platform'")
__props__.__dict__["target_platform"] = target_platform
__props__.__dict__["creation_time"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-native:datamigration:Project"), pulumi.Alias(type_="azure-native:datamigration/v20180315preview:Project"), pulumi.Alias(type_="azure-native:datamigration/v20180331preview:Project"), pulumi.Alias(type_="azure-native:datamigration/v20180419:Project"), pulumi.Alias(type_="azure-native:datamigration/v20180715preview:Project"), pulumi.Alias(type_="azure-native:datamigration/v20210630:Project")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(Project, __self__).__init__(
'azure-native:datamigration/v20171115preview:Project',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'Project':
"""
Get an existing Project resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = ProjectArgs.__new__(ProjectArgs)
__props__.__dict__["creation_time"] = None
__props__.__dict__["databases_info"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["source_connection_info"] = None
__props__.__dict__["source_platform"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["target_connection_info"] = None
__props__.__dict__["target_platform"] = None
__props__.__dict__["type"] = None
return Project(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="creationTime")
def creation_time(self) -> pulumi.Output[str]:
"""
UTC Date and time when project was created
"""
return pulumi.get(self, "creation_time")
@property
@pulumi.getter(name="databasesInfo")
def databases_info(self) -> pulumi.Output[Optional[Sequence['outputs.DatabaseInfoResponse']]]:
"""
List of DatabaseInfo
"""
return pulumi.get(self, "databases_info")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
The project's provisioning state
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> pulumi.Output[Optional['outputs.SqlConnectionInfoResponse']]:
"""
Information for connecting to source
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="sourcePlatform")
def source_platform(self) -> pulumi.Output[str]:
"""
Source platform for the project
"""
return pulumi.get(self, "source_platform")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> pulumi.Output[Optional['outputs.SqlConnectionInfoResponse']]:
"""
Information for connecting to target
"""
return pulumi.get(self, "target_connection_info")
@property
@pulumi.getter(name="targetPlatform")
def target_platform(self) -> pulumi.Output[str]:
"""
Target platform for the project
"""
return pulumi.get(self, "target_platform")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
| [
"noreply@github.com"
] | bpkgoud.noreply@github.com |
fef291a802c179905e0f84f64fa6d6589ddd24b7 | ed6c1d30ced7e984ae507f5a25ebe4d92b33b5d8 | /segno_mimos/qrcode/main.py | 272d54bec4920f76f2925aac87269f9e1d825a92 | [
"BSD-3-Clause"
] | permissive | heuer/segno-mimos | 222f7720b183a08f20b15cf5971e567d5808c740 | 0b1b220c63fcda9fcaa0e42725ea719651a1d53e | refs/heads/master | 2021-07-12T02:20:55.216745 | 2020-08-01T00:54:42 | 2020-08-01T00:54:42 | 64,956,618 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,542 | py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2011, Lincoln Loop
# Copyright (c) 2016 - 2017 -- Lars Heuer - Semagia <http://www.semagia.com/>.
# All rights reserved.
#
# License: BSD License
#
from __future__ import absolute_import, unicode_literals, print_function
import warnings
import segno
from . import constants, exceptions, util
from segno.utils import check_valid_scale, check_valid_border
from segno_mimos.qrcode.image.base import BaseImage
try:
from qrcode.image.base import BaseImage as qrcodeBaseImage
except ImportError:
qrcodeBaseImage = BaseImage
try: # pragma: no cover
range = xrange # Python 2
except NameError:
pass
# <https://wiki.python.org/moin/PortingToPy3k/BilingualQuickRef#New_Style_Classes>
__metaclass__ = type
def make(data=None, **kw):
qr = QRCode(**kw)
qr.add_data(data)
return qr.make_image()
def _check_valid_factory(img_factory):
if img_factory is not None:
assert issubclass(img_factory, (BaseImage, qrcodeBaseImage)) or hasattr(img_factory, 'drawrect')
class QRCode:
def __init__(self, version=None, error_correction=constants.ERROR_CORRECT_M,
box_size=10, border=4, image_factory=None):
check_valid_scale(box_size)
self.version = version and int(version)
self.error_correction = int(error_correction)
self.box_size = int(box_size)
self.border = int(border)
self.image_factory = image_factory
_check_valid_factory(image_factory)
self.clear()
def clear(self):
self.modules = None
self.modules_count = 0
self.data_cache = None
self.data_list = []
self.segno_qrcode = None
def add_data(self, data, optimize=20):
if isinstance(data, util.QRData):
self.data_list.append(data)
else:
if optimize:
chunks = tuple(util.optimal_data_chunks(data))
self.data_list.extend(chunks)
else:
self.data_list.append(util.QRData(data))
self.data_cache = None
def make(self, fit=True):
if fit:
self.version = None
self.makeImpl(False, None)
def makeImpl(self, test, mask_pattern):
if test:
warnings.warn('"test" is not supported')
segno_qrcode = segno.make_qr(self.data_list or '', mode=None,
version=self.version,
error=self.error_correction,
eci=False, boost_error=False, mask=mask_pattern)
self.data_cache = True
self.segno_qrcode = segno_qrcode
self.modules_count = len(segno_qrcode.matrix)
self.modules = [[bool(b) for b in row] for row in segno_qrcode.matrix]
self.version = segno_qrcode.version
def print_tty(self, out=None):
if self.data_cache is None:
self.make()
print(self.segno_qrcode.terminal(out=out, border=self.border))
def print_ascii(self, out=None, tty=False, invert=False):
if self.data_cache is None:
self.make()
print(self.segno_qrcode.terminal(out=out, border=self.border))
def make_image(self, image_factory=None, **kw):
check_valid_scale(self.box_size)
check_valid_border(self.border)
if self.data_cache is None:
self.make()
image_factory = image_factory or self.image_factory
_check_valid_factory(image_factory)
if image_factory is None or image_factory.kind in ('PNG', 'EPS', 'PDF', 'SVG'):
config = dict(scale=self.box_size, border=self.border)
kind = None
if image_factory is not None:
kind = image_factory.kind
try:
config.update(image_factory.config)
except AttributeError:
pass
try:
config['background'] = image_factory.background
except AttributeError:
pass
return _Image(self.segno_qrcode, config, kind)
im = image_factory(self.border, self.modules_count, self.box_size, **kw)
for r in range(self.modules_count):
for c in range(self.modules_count):
if self.modules[r][c]:
im.drawrect(r, c)
return im
def get_matrix(self):
if self.data_cache is None:
self.make()
if not self.border:
return self.modules
width = len(self.modules) + self.border*2
code = [[False]*width] * self.border
x_border = [False]*self.border
for module in self.modules:
code.append(x_border + module + x_border)
code += [[False]*width] * self.border
return code
class _Image:
"""\
This class is almost similar to qrcode.image.pil.PilImage and is able to
save a QR Code in all output formats which are common by qrcode and Segno.
"""
kind = None
allowed_kinds = ('PNG', 'EPS', 'PDF', 'SVG')
def __init__(self, segno_qrcode, config, kind):
self._qrcode = segno_qrcode
self.default_config = config
self.width = len(segno_qrcode.matrix)
self.kind = kind
def save(self, stream, format=None, kind=None, **kw):
fmt = format
if fmt is None:
fmt = kind or self.kind
if fmt is not None:
fmt = fmt.lower()
config = dict(self.default_config)
background_was_set = 'back_color' in kw or 'background' in kw or 'background' in config
config['color'] = kw.pop('fill_color', config.get('color', '#000'))
config['background'] = kw.pop('back_color', kw.pop('background', config.get('background', '#fff')))
if config['background'] == 'transparent':
config['background'] = None
if fmt == 'svg':
# SVG default config
svg_config = dict(scale=config.get('scale', 10) / 10, unit='mm', svgversion=1.1)
config.update(svg_config)
config.update(kw)
if fmt in (None, 'png'):
self._qrcode.save(stream, kind='png', **config)
return
if not background_was_set and fmt in ('eps', 'pdf', 'svg'):
# Remove background color if not set explictly
config['background'] = None
if fmt in ('eps', 'pdf', 'svg'):
self._qrcode.save(stream, kind=fmt, **config)
return
raise ValueError('Unsupported format "{}"'.format(fmt))
| [
"heuer@semagia.com"
] | heuer@semagia.com |
397d9bb3a19a2767981145d677e1eb98ddc7bab3 | d3efc82dfa61fb82e47c82d52c838b38b076084c | /utils/ETF/Redemption_HA/YW_ETFSS_SHSH_051.py | 59e59b0cdd08a7a3a4f4f1930d68b0081e0fc709 | [] | no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,694 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
import time
sys.path.append("/home/yhl2/workspace/xtp_test/ETF")
from import_common import *
sys.path.append("/home/yhl2/workspace/xtp_test/ETF/etf_service")
from ETF_GetComponentShare import etf_get_all_component_stk
from ETF_Add import etf_add
class YW_ETFSS_SHSH_051(xtp_test_case):
def test_YW_ETFSS_SHSH_051(self):
# -----------ETF赎回-------------
title = 'T日购买ETF-T日赎回当天申购的ETF-' \
'T日用当天赎回的成分股申购ETF-T日卖出T日赎回的成分股'
# 定义当前测试用例的期待值
# 期望状态:初始、未成交、部成、全成、部撤已报、部撤、已报待撤、已撤、废单、撤废、内部撤单
# xtp_ID和cancel_xtpID默认为0,不需要变动
case_goal = {
'case_ID': 'ATC-204-051',
'期望状态': '全成',
'errorID': 0,
'errorMSG': '',
'是否生成报单': '是',
'是否是撤废': '否',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title + ', case_ID=' + case_goal['case_ID'])
unit_info = {
'ticker': '580480', # etf代码
'etf_unit': 1, # etf赎回单位数
'component_unit_buy': 1, # 成分股买入单位数
'etf_unit_buy': 1, # etf买入单位数
'component_unit_sell': 1 # 成分股卖出单位数
}
# -----------二级市场买入etf-----------
etf_add(Api,
Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
unit_info['ticker'],
unit_info['etf_unit_buy'])
time.sleep(3)
# -----------查询ETF申购前成分股持仓-------------
component_stk_info = etf_get_all_component_stk(unit_info['ticker'])
# -----------ETF申购-------------
# 参数:证券代码、市场、证券类型、证券状态、交易状态、买卖方向(B买S卖)、期望状态、Api
stkparm = QueryEtfQty(unit_info['ticker'], '1', '14', '2', '0',
'B', case_goal['期望状态'], Api)
# 定义委托参数信息------------------------------------------
# 如果下单参数获取失败,则用例失败
if stkparm['返回结果'] is False:
rs = {
'用例测试结果': stkparm['返回结果'],
'用例错误原因': '获取下单参数失败, ' + stkparm['错误原因'],
}
etf_query_log(case_goal, rs)
self.assertEqual(rs['用例测试结果'], True)
else:
wt_reqs = {
'business_type':
Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_ETF'],
'order_client_id':
2,
'market':
Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker':
stkparm['证券代码'],
'side':
Api.const.XTP_SIDE_TYPE['XTP_SIDE_REDEMPTION'],
'price_type':
Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
'quantity':
int(unit_info['etf_unit'] * stkparm['最小申赎单位']),
}
EtfParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
CaseParmInsertMysql(case_goal, wt_reqs)
rs = etfServiceTest(Api, case_goal, wt_reqs, component_stk_info)
etf_creation_log(case_goal, rs)
# -----------ETF申购-------------
case_goal['期望状态'] = '废单'
case_goal['errorID'] = 11010121
case_goal['errorMSG'] = 'Failed to check security quantity.'
# 定义委托参数信息------------------------------------------
# 如果下单参数获取失败,则用例失败
wt_reqs = {
'business_type':
Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_ETF'],
'market':
Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker':
stkparm['证券代码'],
'side':
Api.const.XTP_SIDE_TYPE['XTP_SIDE_PURCHASE'],
'price_type':
Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
'quantity':
int(unit_info['etf_unit'] * stkparm['最小申赎单位']),
}
EtfParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
CaseParmInsertMysql(case_goal, wt_reqs)
rs = etfServiceTest(Api, case_goal, wt_reqs)
etf_creation_log(case_goal, rs)
# ------------二级市场卖出成份股-----------
case_goal['期望状态'] = '全成'
case_goal['errorID'] = 0
case_goal['errorMSG'] = ''
etf_component_info = QueryEtfComponentsInfoDB(stkparm['证券代码'],wt_reqs['market'])
rs = {}
for stk_info in etf_component_info:
if stk_info[1] != 2:
stk_code = stk_info[0]
components_share = QueryEtfComponentsDB(stkparm['证券代码'],
stk_code)
components_total = int(components_share *
unit_info['component_unit_sell'])
quantity = get_valid_amount(components_total)
limitup_px = getUpPrice(stk_code)
wt_reqs = {
'business_type':
Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'order_client_id':
2,
'market':
Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker':
stk_code,
'side':
Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
'price_type':
Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_CANCEL'],
'price':
limitup_px,
'quantity':
quantity,
}
ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
rs = serviceTest(Api, case_goal, wt_reqs)
if rs['用例测试结果'] is False:
etf_components_sell_log(case_goal, rs)
self.assertEqual(rs['用例测试结果'], True)
etf_components_sell_log(case_goal, rs)
self.assertEqual(rs['用例测试结果'], True)
if __name__ == '__main__':
unittest.main()
| [
"418033945@qq.com"
] | 418033945@qq.com |
3ab6e6f99098e0b8731959aad64c12f1b6a211bd | cde60eed1c85120ae4ec2bc3a2bb4cb418359aee | /lib/disco/mr_path.py | 8449564f47a4928dde7a93f7c791c745438cac35 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | aronwc/quac | c2750c01902b95aead40d75f7c7e8fe5aa9c6e15 | f383b4ffba529d19f2c1d24496e5125118d1e13b | refs/heads/master | 2021-01-15T16:46:40.382175 | 2013-05-22T04:28:27 | 2013-05-22T04:28:27 | 9,962,349 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,571 | py | '''Disco has a scheme to detect which modules your stuff uses, put them in a
zip file, and copy it to workers. In theory, this enables you to install
absolutely nothing app-specific on worker nodes. Unfortunately, it works
poorly (e.g., it misses modules) and has some very weird quirks (e.g.,
packages are not supported). However, if you have a filesystem shared by
all the Disco nodes (e.g., via NFS), you can put your stuff in $PYTHONPATH
and let workers find it that way. Unfortunately, Disco mangles $PYTHONPATH.
This module works around that. To use, copy $PYTHONPATH to $PYTHONPATH_COPY
in your .bashrc, restart the Disco master, then place the following two
lines at the top of your Python scripts before any Disco stuff:
import mr_path
mr_path.fix_pythonpath()
Notes:
1. You will need to be able import *this module* before you can fix the
path; to do so, you'll want to set required_modules (mr.base.Job does
this automatically).
2. This module still doesn't fix the problem that Disco programs (e.g.,
modules with subclasses of disco.*) cannot be packaged. There is a
failed attempt at that in r3be9. Perhaps another wrapper is possible.
There is a bug for (perhaps part of) this problem:
<https://github.com/discoproject/disco/issues/328>'''
import os
import sys
path_fixed = False
def fix_pythonpath():
global path_fixed
if (not path_fixed):
for i in os.environ['PYTHONPATH_COPY'].split(':'):
sys.path.insert(0, i)
path_fixed = True
| [
"reidpr@lanl.gov"
] | reidpr@lanl.gov |
f061ccdde841c5c5ac087dabb6928fff1c970d56 | 68c29e7a17d87e34b1d6613c3e2e70a36fd2adcc | /easy/349_two_array_intersection.py | 1afa7cccb3ed847d3c43ac2e980d55f050a64209 | [
"MIT"
] | permissive | Sukhrobjon/leetcode | 284242fbfded3e47a57ce9230f9bc1175685cd7a | 547c200b627c774535bc22880b16d5390183aeba | refs/heads/master | 2022-02-26T20:56:57.347119 | 2022-02-05T01:58:49 | 2022-02-05T01:58:49 | 192,158,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 628 | py | """
Given two arrays, write a function to compute their intersection.
Note:
- Each element in the result must be unique.
- The result can be in any order.
link: https://leetcode.com/problems/intersection-of-two-arrays/
"""
class Solution(object):
def intersection(self, nums1, nums2):
"""
Finds the intersection of two given array.
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
return list(set(nums1).intersection(set(nums2)))
nums1 = [1, 2, 2, 1]
nums2 = [2, 2]
obj = Solution()
result = obj.intersection(nums1, nums2)
print(result)
| [
"sgolibbo@mail.ccsf.edu"
] | sgolibbo@mail.ccsf.edu |
a3adc1e9ab9b4222666320de040e4640c918f092 | 8d50e46e46bed95dac734f148f942c3c4496e855 | /build/chefbot/chefbot_description/catkin_generated/pkg.develspace.context.pc.py | 4d1ee3a6b51f7b7fe27622105e10853f581e0508 | [
"BSD-2-Clause"
] | permissive | kaiodt/kaio_ros_ws | b9a77b3d23ed0e0c2f3ebe8dfdf819c2130e3968 | d9ee0edb97d16cf2a0a6074fecd049db7367a032 | refs/heads/master | 2020-12-29T02:06:24.179739 | 2016-08-29T13:29:24 | 2016-08-29T13:29:24 | 64,547,122 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 383 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "chefbot_description"
PROJECT_SPACE_DIR = "/home/kaiodt/kaio_ros_ws/devel"
PROJECT_VERSION = "0.0.0"
| [
"kaiodtr@gmail.com"
] | kaiodtr@gmail.com |
196b3a4ec3a18c69c259e617cb54c0e9ce008877 | 62e58c051128baef9452e7e0eb0b5a83367add26 | /x12/4040/278004040.py | 7a7cf770c5ac97ef0155d9ded6220558dac39a57 | [] | no_license | dougvanhorn/bots-grammars | 2eb6c0a6b5231c14a6faf194b932aa614809076c | 09db18d9d9bd9d92cefbf00f1c0de1c590fe3d0d | refs/heads/master | 2021-05-16T12:55:58.022904 | 2019-05-17T15:22:23 | 2019-05-17T15:22:23 | 105,274,633 | 0 | 0 | null | 2017-09-29T13:21:21 | 2017-09-29T13:21:21 | null | UTF-8 | Python | false | false | 1,581 | py | from bots.botsconfig import *
from records004040 import recorddefs
syntax = {
'version' : '00403', #version of ISA to send
'functionalgroup' : 'HI',
}
structure = [
{ID: 'ST', MIN: 1, MAX: 1, LEVEL: [
{ID: 'BHT', MIN: 1, MAX: 1},
{ID: 'HL', MIN: 1, MAX: 99999, LEVEL: [
{ID: 'TRN', MIN: 0, MAX: 9},
{ID: 'AAA', MIN: 0, MAX: 9},
{ID: 'UM', MIN: 0, MAX: 1},
{ID: 'HCR', MIN: 0, MAX: 1},
{ID: 'REF', MIN: 0, MAX: 9},
{ID: 'DTP', MIN: 0, MAX: 9},
{ID: 'HI', MIN: 0, MAX: 1},
{ID: 'HSD', MIN: 0, MAX: 1},
{ID: 'CRC', MIN: 0, MAX: 9},
{ID: 'CL1', MIN: 0, MAX: 1},
{ID: 'CR1', MIN: 0, MAX: 1},
{ID: 'CR2', MIN: 0, MAX: 1},
{ID: 'CR4', MIN: 0, MAX: 1},
{ID: 'CR5', MIN: 0, MAX: 1},
{ID: 'CR6', MIN: 0, MAX: 1},
{ID: 'CR7', MIN: 0, MAX: 1},
{ID: 'CR8', MIN: 0, MAX: 1},
{ID: 'PWK', MIN: 0, MAX: 99999},
{ID: 'MSG', MIN: 0, MAX: 1},
{ID: 'NM1', MIN: 0, MAX: 99999, LEVEL: [
{ID: 'REF', MIN: 0, MAX: 9},
{ID: 'N2', MIN: 0, MAX: 1},
{ID: 'N3', MIN: 0, MAX: 1},
{ID: 'N4', MIN: 0, MAX: 1},
{ID: 'PER', MIN: 0, MAX: 3},
{ID: 'AAA', MIN: 0, MAX: 9},
{ID: 'PRV', MIN: 0, MAX: 1},
{ID: 'DMG', MIN: 0, MAX: 1},
{ID: 'INS', MIN: 0, MAX: 1},
{ID: 'DTP', MIN: 0, MAX: 9},
]},
]},
{ID: 'SE', MIN: 1, MAX: 1},
]}
]
| [
"jason.capriotti@gmail.com"
] | jason.capriotti@gmail.com |
408f877ad0c183781906ef0d631c6a5bce6ec140 | 600df3590cce1fe49b9a96e9ca5b5242884a2a70 | /third_party/catapult/catapult_build/run_with_typ.py | b9c69f2e88db8e8169d48a419a76c5d2f48295b3 | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | permissive | metux/chromium-suckless | efd087ba4f4070a6caac5bfbfb0f7a4e2f3c438a | 72a05af97787001756bae2511b7985e61498c965 | refs/heads/orig | 2022-12-04T23:53:58.681218 | 2017-04-30T10:59:06 | 2017-04-30T23:35:58 | 89,884,931 | 5 | 3 | BSD-3-Clause | 2022-11-23T20:52:53 | 2017-05-01T00:09:08 | null | UTF-8 | Python | false | false | 920 | py | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A wrapper around typ (test your projects)."""
import os
import sys
def Run(top_level_dir, path=None):
"""Runs a set of Python tests using typ.
Args:
top_level_dir: Directory to look for Python unit tests in.
path: A list of extra paths to add to sys.path when running the tests.
Returns:
An exit code (0 for success, otherwise non-zero).
"""
typ_path = os.path.abspath(os.path.join(
os.path.dirname(__file__), os.path.pardir, 'third_party', 'typ'))
_AddToPathIfNeeded(typ_path)
import typ
return typ.main(
top_level_dir=top_level_dir,
path=(path or []),
coverage_source=[top_level_dir])
def _AddToPathIfNeeded(path):
if path not in sys.path:
sys.path.insert(0, path)
| [
"enrico.weigelt@gr13.net"
] | enrico.weigelt@gr13.net |
a57cf4df9f0068460b9410a8f12b9098b79114f7 | 6cf70b611cc4d45a7c3e63d818f100f8be895314 | /067_二进制求和/Solution.py | db242d61bda7d2ca30ea0466d7b824d3ed6133e2 | [] | no_license | hhy5277/LeetCode-9 | 19bed5482841e7dcdc346093b6fb17ed769fe72e | cf240ff3c9124a1af87b6d5f49ec426ef248298c | refs/heads/master | 2020-06-19T00:19:30.194373 | 2019-06-05T13:53:44 | 2019-06-05T13:53:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,043 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2019/4/28 21:01
# @Author : zenRRan
# @Version : python3.7
# @File : Solution.py
# @Software: PyCharm
class Solution:
def addBinary(self, a: str, b: str) -> str:
if a == '':
return b
if b == '':
return a
flag = 0
len_a = len(a)
len_b = len(b)
length = max(len_a, len_b)
a = (length - len(a)) * '0' + a
b = (length - len(b)) * '0' + b
res = ''
for i in range(length):
c = int(a[-1]) + int(b[-1]) + flag
if c > 2:
res += '1'
flag = 1
elif c > 1:
res += '0'
flag = 1
else:
res += str(c)
flag = 0
a = a[:-1]
b = b[:-1]
if flag == 1:
res += '1'
return res[::-1]
data = [['11', '1'], ['1010', '1011']]
for elem in data:
print(Solution().addBinary(elem[0], elem[1]))
| [
"824203828@qq.com"
] | 824203828@qq.com |
4ce566297c9b085acc9406eff21a80e0d11f3e64 | 58afefdde86346760bea40690b1675c6639c8b84 | /leetcode/find-in-mountain-array/397500377.py | 8e6b2ccf21e4d4d83c72f3ba712108de64aea34f | [] | no_license | ausaki/data_structures_and_algorithms | aaa563f713cbab3c34a9465039d52b853f95548e | 4f5f5124534bd4423356a5f5572b8a39b7828d80 | refs/heads/master | 2021-06-21T10:44:44.549601 | 2021-04-06T11:30:21 | 2021-04-06T11:30:21 | 201,942,771 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,516 | py | # title: find-in-mountain-array
# detail: https://leetcode.com/submissions/detail/397500377/
# datetime: Fri Sep 18 23:28:26 2020
# runtime: 28 ms
# memory: 14.6 MB
# """
# This is MountainArray's API interface.
# You should not implement it, or speculate about its implementation
# """
#class MountainArray:
# def get(self, index: int) -> int:
# def length(self) -> int:
class Solution:
def findInMountainArray(self, target: int, mountain_arr: 'MountainArray') -> int:
class Wrap:
def __init__(self, l, r, rev=False):
self.l = l
self.r = r
self.rev = rev
def __getitem__(self, i):
return A.get(self.l + i) if not self.rev else A.get(self.r - i)
def __len__(self):
return self.r - self.l + 1
A = mountain_arr
L = A.length()
i, j = 0, L - 1
while i <= j:
m = (i + j) // 2
v = A.get(m)
w = A.get(m - 1) if m else -1
if w < v:
i = m + 1
else:
j = m - 1
# print(i, j)
i = j
l = Wrap(0, i)
k = bisect.bisect_left(l, target)
if k >= len(l):
return -1
if l[k] == target:
return k
r = Wrap(i + 1, L - 1, True)
k = bisect.bisect_left(r, target)
if k >= len(r):
return -1
if r[k] == target:
return L - 1 - k
return -1 | [
"ljm51689@gmail.com"
] | ljm51689@gmail.com |
7fa4b7b39352d11a19ca84ce9da4278700dbb0ed | 632dcb4e37cadd87cb7ff8715b0048df5cd0d11b | /cc3d/tests/plugin_test_suite/connectivity_global_fast_test_run/Simulation/connectivity_global_fastSteppables.py | d544b685fd2eb386fca3d8171176a6269cfa8604 | [] | no_license | CompuCell3D/CompuCell3D | df638e3bdc96f84b273978fb479842d071de4a83 | 65a65eaa693a6d2b3aab303f9b41e71819f4eed4 | refs/heads/master | 2023-08-26T05:22:52.183485 | 2023-08-19T17:13:19 | 2023-08-19T17:13:19 | 12,253,945 | 51 | 41 | null | 2023-08-27T16:36:14 | 2013-08-20T20:53:07 | C++ | UTF-8 | Python | false | false | 544 | py |
from cc3d.core.PySteppables import *
class connectivity_global_fastSteppable(SteppableBasePy):
    """Minimal steppable skeleton for the connectivity_global_fast simulation.

    All lifecycle hooks are intentionally empty; fill them in with
    simulation logic as needed.
    """

    def __init__(self, frequency=1):
        # Let the CC3D base class handle scheduling at the given frequency.
        SteppableBasePy.__init__(self, frequency)

    def start(self):
        """Runs once, before Monte Carlo step 0."""

    def step(self, mcs):
        """Runs every `frequency` Monte Carlo steps.

        :param mcs: current Monte Carlo step
        """

    def finish(self):
        """Runs once, after the last Monte Carlo step."""
| [
"maciekswat@gmail.com"
] | maciekswat@gmail.com |
fb50a368bcc5d8f9b46c504181ecff05ab605d77 | e7f67295e62fc5301ab23bce06c61f2311c2eeee | /mjml/scripts/mjml-html-compare | f49399ce8f7dc8cc9ba742460090ff97b3623b1d | [
"MIT"
] | permissive | bayesimpact/mjml-stub | 94d10588359990cd58d2085429b19a3777c51f15 | 30bab3f2e197d2f940f58439f2e8cd9fadb58d48 | refs/heads/main | 2023-05-08T11:54:19.313877 | 2021-01-25T21:30:48 | 2021-01-25T21:30:48 | 344,026,118 | 0 | 0 | MIT | 2021-03-03T06:31:49 | 2021-03-03T06:31:48 | null | UTF-8 | Python | false | false | 471 | #!/usr/bin/env python3
from pathlib import Path
import sys
from htmlcompare import assert_same_html
from mjml import mjml_to_html
# Usage: mjml-html-compare <template.mjml> <expected.html>
# Renders the MJML template and asserts that the produced HTML is
# equivalent to the reference file (AssertionError on mismatch).
mjml_filename = Path(sys.argv[1])
html_filename = Path(sys.argv[2])
with mjml_filename.open('rb') as mjml_fp:
    result = mjml_to_html(mjml_fp)
with html_filename.open('rb') as html_fp:
    expected_html = html_fp.read()
# Fail fast if rendering itself reported problems before diffing output.
assert not result.errors
actual_html = result.html
assert_same_html(expected_html, actual_html, verbose=True)
| [
"felix.schwarz@oss.schwarz.eu"
] | felix.schwarz@oss.schwarz.eu | |
9360433c0a871cbdf2706b2337e8d4a813b08125 | 920b9cb23d3883dcc93b1682adfee83099fee826 | /pipeline/variable_framework/migrations/0001_initial.py | 56856837e4fec961b57f7bb1f80e956d630c85ae | [
"MIT",
"LGPL-2.1-or-later",
"LGPL-3.0-only"
] | permissive | TencentBlueKing/bk-itsm | f817fb166248d3059857b57d03e8b5ec1b78ff5b | 2d708bd0d869d391456e0fb8d644af3b9f031acf | refs/heads/master | 2023-08-31T23:42:32.275836 | 2023-08-22T08:17:54 | 2023-08-22T08:17:54 | 391,839,825 | 100 | 86 | MIT | 2023-09-14T08:24:54 | 2021-08-02T06:35:16 | Python | UTF-8 | Python | false | false | 1,470 | py | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial schema for the variable_framework app: registers the single
    # VariableModel table.  Generated migration -- avoid hand edits beyond
    # comments.
    initial = True
    dependencies = []
    operations = [
        migrations.CreateModel(
            name="VariableModel",
            fields=[
                ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
                # verbose_name "\u53d8\u91cf\u7f16\u7801" = "variable code"
                ("code", models.CharField(max_length=255, unique=True, verbose_name="\u53d8\u91cf\u7f16\u7801")),
                # verbose_name "\u53d8\u91cf\u662f\u5426\u53ef\u7528" = "variable enabled?"
                ("status", models.BooleanField(default=True, verbose_name="\u53d8\u91cf\u662f\u5426\u53ef\u7528")),
            ],
            options={"verbose_name": "Variable\u53d8\u91cf", "verbose_name_plural": "Variable\u53d8\u91cf"},
        ),
    ]
| [
"1758504262@qq.com"
] | 1758504262@qq.com |
38f4aaa11589e5d2daef4908a156e77873951653 | 77a7e01cf07531c8d4764c6e3edbbf956855a936 | /data-processing/tests/test_match_symbols.py | e7c3221cabccb396500fd7270b135827a86149c8 | [
"Apache-2.0",
"AGPL-3.0-only",
"GPL-1.0-or-later",
"AGPL-3.0-or-later"
] | permissive | huhuaping/scholarphi | ccc6afa8b2cfea1888748a7457fb882a98e15286 | ca892b41ab96a48b88183d8c06a26b9374c9167d | refs/heads/main | 2023-08-31T00:00:23.375640 | 2021-09-19T07:55:12 | 2021-09-19T07:55:12 | 374,073,210 | 0 | 0 | Apache-2.0 | 2021-06-05T09:29:32 | 2021-06-05T09:29:32 | null | UTF-8 | Python | false | false | 1,912 | py | from common.match_symbols import Match, get_mathml_matches
DEFAULT_TEX_PATH = "tex-path"
DEFAULT_EQUATION_INDEX = 0
def test_matches_self():
    """A symbol matches itself when self-matches are allowed (the default)."""
    symbol = "<mi>x</mi>"
    result = get_mathml_matches([symbol])
    assert len(result) == 1
    assert result[symbol] == [Match(symbol, symbol, 1)]
def test_matches_symbol_with_shared_base():
    """Symbols sharing a base (x_i and x^2) should match each other."""
    subscripted = "<msub><mi>x</mi><mi>i</mi></msub>"
    superscripted = "<msup><mi>x</mi><mn>2</mn></msup>"
    matches = get_mathml_matches(
        [subscripted, superscripted], allow_self_matches=False
    )
    assert matches[subscripted] == [Match(subscripted, superscripted, 1)]
    assert matches[superscripted] == [Match(superscripted, subscripted, 1)]
def test_exact_match_ranks_higher_than_partial_match():
    """An identical symbol outranks one that only shares the base."""
    subscripted = "<msub><mi>x</mi><mi>i</mi></msub>"
    superscripted = "<msup><mi>x</mi><mn>2</mn></msup>"
    matches = get_mathml_matches([subscripted, superscripted])
    expected = [
        Match(subscripted, subscripted, 1),
        Match(subscripted, superscripted, 2),
    ]
    assert matches[subscripted] == expected
def test_does_not_match_base_to_subscript():
    """A bare identifier (i) must not match a symbol using it as a subscript."""
    bare_i = "<mi>i</mi>"
    x_with_i_subscript = "<msub><mi>x</mi><mi>i</mi></msub>"
    matches = get_mathml_matches(
        [bare_i, x_with_i_subscript], allow_self_matches=False
    )
    assert bare_i not in matches
    assert x_with_i_subscript not in matches
def test_does_not_match_using_shared_subscript():
    """Sharing only a subscript (x_i vs t_i) is not enough to match."""
    x_with_i = "<msub><mi>x</mi><mi>i</mi></msub>"
    t_with_i = "<msub><mi>t</mi><mi>i</mi></msub>"
    matches = get_mathml_matches([x_with_i, t_with_i], allow_self_matches=False)
    assert x_with_i not in matches
    assert t_with_i not in matches
def test_omit_duplicate_matches():
    """A symbol appearing twice in the input yields only one match entry."""
    x_one = "<msub><mi>x</mi><mn>1</mn></msub>"
    x_two = "<msub><mi>x</mi><mn>2</mn></msub>"
    matches = get_mathml_matches([x_one, x_two, x_two], allow_self_matches=False)
    assert len(matches[x_one]) == 1
| [
"head.andrewm@gmail.com"
] | head.andrewm@gmail.com |
8769ebbd283b5ccdb8e35c30ab2f020c91949723 | eddb5cc6ece559a21fb2d99dc03fb4b9e3e1ddb0 | /fagaiwei/others_test/xueqiu_test.py | d27ccff8868027c398543e88e64c99b26309ce54 | [] | no_license | KKtoNN/python_spider | a9bdd005d607b1265a556cb4908e84804c0bfc62 | c72bd061c3ca4145fef85b0fd9c15576441cdb09 | refs/heads/master | 2020-03-18T22:50:00.131802 | 2018-05-30T00:47:56 | 2018-05-30T00:47:56 | 135,367,902 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 920 | py | import requests
# url = "http://news.people.com.cn/210801/211150/index.js?_=1525332714933"
# url = "http://news.people.com.cn/210801/211150/index.js?_=1525332714933"
# Xueqiu public timeline JSON endpoint (category 111, first 10 statuses).
url = "https://xueqiu.com/v4/statuses/public_timeline_by_category.json?since_id=-1&max_id=-1&count=10&category=111"
headers = {
    # "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
    # "Accept-Encoding": "gzip, deflate, br",
    # "Accept-Language": "zh-CN,zh;q=0.9",
    # "Cache-Control": "max-age=0",
    # "Connection": "keep-alive",
    # "Host": "xueqiu.com",
    # "Upgrade-Insecure-Requests": "1",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36",
}
st_url = "https://xueqiu.com/"
session = requests.session()
# Warm-up GET against the site root; `res` is unused -- presumably this is
# to collect the session cookies the JSON API requires.  TODO confirm.
res = session.get(st_url, headers=headers)
response = session.get(url, headers=headers)
print(response)
# print(response.json())
result = response.json()
print(result)
"18835702864@163.com"
] | 18835702864@163.com |
40a16ce415508f488033c0f2f56fa0b17b24df6e | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_156/406.py | 4ee40c64dba71311688697c965764f7c08bc02f6 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,338 | py | from math import ceil
def solve_problem(file_name):
    # Code Jam driver: reads "<file_name>.in", solves each test case, and
    # writes "Case #k: <answer>" lines to "<file_name>.out".
    # NOTE(review): Python 2 code (`print result` statement; list-returning
    # `map`).  The two file handles are never closed.
    input_file = file_name + ".in"
    output_file = file_name + ".out"
    f = open(input_file, "r")
    g = open(output_file, "w")
    # Get test cases:
    test_cases = int(f.readline())
    for test_case in range(1, test_cases + 1):
        length = f.readline()  # stack-count line; value unused, read only to advance
        pancakes = map(int, f.readline().split())
        answer = solve_test_case(pancakes)
        result = "Case #" + str(test_case) + ": " + str(answer) + "\n"
        g.write(result)
        print result
    return "Done"
def solve_test_case(pancakes):
    """Minimum minutes to finish all pancake stacks.

    For every candidate cap on the largest allowed stack, the total time is
    the cap (minutes spent eating) plus the number of split moves needed to
    bring every stack under that cap; return the cheapest candidate.
    """
    best_time = float("inf")
    for cap in range(max(pancakes), 0, -1):
        splits = 0
        for stack in pancakes:
            splits += moves_needed(stack, cap)
        best_time = min(best_time, cap + splits)
    return best_time
def moves_needed(pancakes, max_pancakes):
    """Return the number of split moves so no stack exceeds `max_pancakes`.

    A stack of p pancakes must end up as ceil(p / cap) stacks, which costs
    ceil(p / cap) - 1 splits.  Uses exact integer ceiling division
    (-(-p // cap)) instead of the previous float()/math.ceil round trip,
    which could misround for large integers (e.g. p = 10**18 + 1).
    Also avoids shadowing the function name with a local variable.
    """
    return -(-pancakes // max_pancakes) - 1
print solve_problem("B-large")
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
0f47292c3df5ab71c9d9bdec321f379075b14053 | 9fce5f629873ef5c43274fdae1d49a270ec78f38 | /venv/bin/pytest | f53807b461ad11d5c4db49d0e044ae604c9722ee | [] | no_license | makkar-nishant123/Pythonselenium | 2c9273d81915bc1f7724de93d7b87c76b5f9066b | 173f49e6522b80f13e6e406756130c0b1376a139 | refs/heads/master | 2021-06-12T09:30:24.097297 | 2021-04-22T02:48:56 | 2021-04-22T02:48:56 | 182,635,591 | 0 | 0 | null | 2021-04-22T02:52:08 | 2019-04-22T06:26:41 | Python | UTF-8 | Python | false | false | 249 | #!/Users/nishantmakkar/PycharmProjects/Day1/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pytest import main
if __name__ == '__main__':
    # Strip the setuptools wrapper suffix ("-script.py"/"-script.pyw" or
    # ".exe" on Windows) so pytest reports a clean program name in argv[0].
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
| [
"makkar.nishant123@gmail.com"
] | makkar.nishant123@gmail.com | |
35dc1fb247c5ddb4c0bce809ecfd038db2097050 | ef821468b081ef2a0b81bf08596a2c81e1c1ef1a | /Python OOP/Testing-Exercise/Code_For_Testing/Account.py | 3e74b71551a7fbf33d20b751a7e5e99e0818cc42 | [] | no_license | Ivaylo-Atanasov93/The-Learning-Process | 71db22cd79f6d961b9852f140f4285ef7820dd80 | 354844e2c686335345f6a54b3af86b78541ed3f3 | refs/heads/master | 2023-03-30T20:59:34.304207 | 2021-03-29T15:23:05 | 2021-03-29T15:23:05 | 294,181,544 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,475 | py | class Account:
def __init__(self, owner, amount=0):
self.owner = owner
self.amount = amount
self._transactions = []
def add_transaction(self, amount):
if isinstance(amount, int):
self._transactions.append(amount)
#self.amount += amount
else:
raise ValueError("please use int for amount")
@property
def balance(self):
return sum(self._transactions) + self.amount
@staticmethod
def validate_transaction(account, amount_to_add):
if account.amount + sum(account._transactions) + amount_to_add < 0:
raise ValueError("sorry cannot go in debt!")
else:
account._transactions.append(amount_to_add)
return f"New balance: {sum(account._transactions) + account.amount}"
def __len__(self):
return len(self._transactions)
def __str__(self):
return f'Account of {self.owner} with starting amount: {self.amount}'
def __repr__(self):
return f'Account({self.owner}, {self.amount})'
def __getitem__(self, item):
return self._transactions[item]
def __reversed__(self):
return reversed(self._transactions)
def __gt__(self, other):
return self.balance > other.balance
def __ge__(self, other):
return self.balance >= other.balance
def __lt__(self, other):
return self.balance < other.balance
def __le__(self, other):
return self.balance <= other.balance
def __eq__(self, other):
return self.balance == other.balance
def __ne__(self, other):
return self.balance != other.balance
def __add__(self, other):
new_account = Account(f'{self.owner}&{other.owner}' , self.amount + other.amount)
new_account._transactions = self._transactions + other._transactions
return new_account
# acc = Account('bob', 10)
# acc2 = Account('john')
# print(acc)
# print(repr(acc))
# acc.add_transaction(20)
# acc.add_transaction(-20)
# acc.add_transaction(30)
# print(acc.balance)
# print(len(acc))
# for transaction in acc:
# print(transaction)
# print(acc[1])
# print(list(reversed(acc)))
# acc2.add_transaction(10)
# acc2.add_transaction(60)
# print(acc > acc2)
# print(acc >= acc2)
# print(acc < acc2)
# print(acc <= acc2)
# print(acc == acc2)
# print(acc != acc2)
# acc3 = acc + acc2
# print(acc3)
# print(acc3._transactions)
# print(Account.validate_transaction(acc, 100))
| [
"ivailo.atanasov93@gmail.com"
] | ivailo.atanasov93@gmail.com |
652a8caf01f28a33c48720bd5911ab324e989653 | 9b6f36f544af5a2c1c042b18dda920c78fd11331 | /omsBackend/omsBackend/urls.py | 6d22eadd6ee826298b7574e14a6d7ef4ea919b65 | [] | no_license | Nikita-stels/MyOms | a946f08b4ba7abfa8392e98c579320b501a7ca2a | fdaf9d5a2a29b5386c1a86fcf89a2c0d5527687a | refs/heads/master | 2022-09-17T20:40:45.228067 | 2020-01-08T14:41:04 | 2020-01-08T14:41:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,400 | py | # -*- coding: utf-8 -*-
# author: kiven
from django.conf.urls import url, include
from django.conf.urls.static import static
from rest_framework_jwt.views import obtain_jwt_token
from rest_auth.views import PasswordChangeView
from django.views.generic.base import TemplateView
from omsBackend import settings
from omsBackend.routerApi import router
from apps.perms.views import routers
from apps.jobs.views import update_jobs_status
from apps.salts.views import update_states_status, get_state_bygroup
# The version module auto-registers Models that need version control.
urlpatterns = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) + [
    # REST API routes registered on the shared DRF router.
    url(r'^api/', include(router.urls)),
    url(r'^api/routers/', routers, name="myrouter"),
    url(r'^api/update_jobs_status/', update_jobs_status, name="update_jobs_status"),
    url(r'^api/update_states_status/', update_states_status, name="update_states_status"),
    url(r'^api/get_state_bygroup/', get_state_bygroup, name="get_state_bygroup"),
    # salt
    url(r'^api/salts/', include('apps.salts.urls')),
    # user authentication (password change + JWT token endpoints)
    url(r'^api/changepasswd/', PasswordChangeView.as_view(), name='changepasswd'),
    url(r'^api/api-token-auth/', obtain_jwt_token, name='rest_framework_token'),
    url(r'^api/api-auth/', include('rest_framework.urls', namespace='rest_framework')),
    # url(r'', TemplateView.as_view(template_name="index.html")),
]
| [
"1069195546@qq.com"
] | 1069195546@qq.com |
ac620dcfd99271e3beed1f198e5a957010ac5b13 | 4772576b2f7601fb3295cec7756c832c250ffbc2 | /81beg.py | 0b2085be0b560c72211c5f8a2c0d2d62205bb84b | [] | no_license | Dhineshkumarraveendiran/Guvi | db3a956025299fcb2fd06911cc322403c0027ca1 | 3904a980fa59dd079473a4d68c345ed5116160f1 | refs/heads/master | 2020-04-15T04:59:55.974890 | 2019-05-15T10:11:59 | 2019-05-15T10:11:59 | 164,405,170 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 51 | py | #dvfdf
# Read two integers from one line of stdin and print their difference (m - n).
first, second = map(int, input().split())
print(second - first)
| [
"noreply@github.com"
] | Dhineshkumarraveendiran.noreply@github.com |
0dc0846c2c3dce2ba4d1e4dfeb5d1fcfafc85aeb | fef925602f644a08491976f9fb6cc86816d30f2e | /WebApp/ToLetTrackerWeb/tolettracker/tolet/apps.py | 04f6c8df568b9e9f94fb76c8d8481c47628656a3 | [] | no_license | alrafiabdullah/tolet_tracker | 6ae62c7ff856b4ff9e7e67cbd82e74ecb5e56fbb | ab9b85a5216a1fe64aeea96ae9ffd5f1adfd2c45 | refs/heads/master | 2022-12-16T10:48:39.615562 | 2020-09-12T16:36:40 | 2020-09-12T16:36:40 | 294,983,478 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | from django.apps import AppConfig
# Django app configuration.
class ToletConfig(AppConfig):
    """Application config for the ``tolet`` Django app."""
    name = 'tolet'
| [
"abdullah34alrafi@gmail.com"
] | abdullah34alrafi@gmail.com |
2b933a9ae42334721a3cae44e3413bd2ebcc44b3 | 4ec2b9c52dfa1d80fff89cead0f4cc8ec2874c1f | /1_tf_intro/exercises_sol/sol1_nNetReg.py | 12776d0d34843aa661b7b87ef8cda4c9a5c743c7 | [] | no_license | rsanchezgarc/deepLearningCourse | 31439ba9640662f0840ee7f5c58657d4dd5b6c5f | 175e3514e2b767ca2f5b4c88e891a777d95f513b | refs/heads/master | 2020-03-21T02:26:50.702008 | 2018-06-20T21:44:33 | 2018-06-20T21:44:33 | 137,998,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,406 | py | import tensorflow as tf
import numpy as np
from keras.datasets import boston_housing
from sklearn import cross_validation
from sklearn.utils import shuffle
from sklearn.preprocessing import StandardScaler
N_EPOCHS= 2
N_HIDDEN= 64
BATCH_SIZE= 32
LEARNING_RATE= 1e-10 #PLAY with learning rate. try 1e-1, 1e-2 ...
#load data
def generateData1(size=1024):
    """Synthesize a toy regression set: y = sum of 3 features + small noise.

    Returns ((x_train, y_train), (x_test, y_test)) with a fixed-seed
    90/10 split; targets are column vectors of shape (n, 1).
    """
    features = np.random.rand(size, 3) * 10
    noise = np.random.rand(size) * .1
    targets = np.expand_dims(np.sum(features, axis=1) + noise, axis=-1)
    x_train, x_test, y_train, y_test = cross_validation.train_test_split(
        features, targets, test_size=0.1, random_state=121)
    return (x_train, y_train), (x_test, y_test)
def generateData2():
    """Load the Keras Boston-housing set, with targets reshaped to column vectors."""
    (x_train, y_train), (x_test, y_test) = boston_housing.load_data()
    return (
        (x_train, np.expand_dims(y_train, axis=-1)),
        (x_test, np.expand_dims(y_test, axis=-1)),
    )
# Load the synthetic dataset (swap in generateData2() for Boston housing).
(x_train, y_train), (x_test, y_test) = generateData1()
#Normalize data. ( (x-mean)/std )
# Fit the scaler on the training split only, then reuse it on the test
# split, so test statistics never leak into normalization.
normalizer= StandardScaler()
x_train=normalizer.fit_transform(x_train)
x_test=normalizer.transform(x_test)
#split train and validation
x_train, x_validation, y_train, y_validation = cross_validation.train_test_split(x_train, y_train, test_size=0.1, random_state=121)
print(x_train.shape, y_train.shape)
#Model definition
# TensorFlow 1.x graph-mode API: placeholders feed the graph and a Session
# executes it (needs tf.compat.v1 under TF 2.x).
inputPh= tf.placeholder(dtype=tf.float32, shape=[None, x_train.shape[1]], name="inputData") #shape= N_Examples x nFeats
labelsPh= tf.placeholder(dtype=tf.float32, shape=[None, 1], name="labelsData")
# Hidden layer: ReLU(x @ W + b) with N_HIDDEN units.
with tf.variable_scope("hidden_layer"):
    w= tf.get_variable(name="weights", shape=[x_train.shape[1],N_HIDDEN], dtype=tf.float32, #shape= nFeats x N_HIDDEN
                       initializer=tf.truncated_normal_initializer(mean=0.0, stddev=0.1, dtype=tf.float32, seed=None),
                       regularizer=None, trainable=True)
    b= tf.get_variable(name="bias", shape=[N_HIDDEN], dtype=tf.float32,  #shape= N_HIDDEN
                       initializer=tf.constant_initializer(value=0.01, dtype=tf.float32),
                       regularizer=None, trainable=True)
    h1_out= tf.nn.relu( tf.matmul(inputPh,w) + b)
# Output layer: single linear unit (regression head, no activation).
with tf.variable_scope("output_layer"):
    w= tf.get_variable(name="weights", shape=[N_HIDDEN,1], dtype=tf.float32, #shape= N_HIDDEN x 1
                       initializer=tf.truncated_normal_initializer(mean=0.0, stddev=0.1, dtype=tf.float32, seed=None),
                       regularizer=None, trainable=True)
    b= tf.get_variable(name="bias", shape=[1], dtype=tf.float32,
                       initializer=tf.constant_initializer(value=0.01, dtype=tf.float32),
                       regularizer=None, trainable=True)
    y_pred= tf.matmul(h1_out,w) + b
# Mean-squared-error loss over the batch.
error = tf.reduce_mean(( tf.square(labelsPh -y_pred) ) ) #shape= N_Examples x 1
#error = tf.losses.mean_squared_error(labelsPh, y_pred) #Equivalent but prefered
optimizer= tf.train.GradientDescentOptimizer(learning_rate= LEARNING_RATE)
#optimizer= tf.train.AdamOptimizer(learning_rate= LEARNING_RATE) #Smarter optimizer
global_step = tf.Variable(0, name='global_step', trainable=False)
train_step = optimizer.minimize(error, global_step=global_step)
session = tf.Session()
session.run(tf.global_variables_initializer())
#FUNCTION TO EVALUATE
def coefficient_of_determination(y_true, y_pred):
    """Return the R^2 score: 1 - SS_res / SS_tot.

    Works for both 1-D targets and the (N, 1) column vectors this script
    uses.  The previous implementation built the mean predictor as a
    Python list, which broadcast a (N,) array against (N, 1) labels,
    producing an (N, N) difference and inflating SS_tot by a factor of N
    (so R^2 was overstated).  Vectorizing the computation fixes that and
    avoids the per-element np.mean calls.
    """
    y_true = np.asarray(y_true, dtype=float)
    y_pred = np.asarray(y_pred, dtype=float)
    ss_res = np.sum((y_pred - y_true) ** 2)
    ss_tot = np.sum((y_true - np.mean(y_true)) ** 2)
    return 1 - ss_res / ss_tot
# Global optimizer-step counter (only incremented; bookkeeping).
nStep=0
for nEpoch in range( N_EPOCHS ):
    # NOTE(review): a fixed random_state here means every epoch reuses the
    # exact same shuffle permutation.
    x_train, y_train = shuffle(x_train, y_train, random_state=121)
    labels_train= []
    preds_train= []
    for i in range(0, x_train.shape[0], BATCH_SIZE):
        feed_dict= {inputPh: x_train[i:i + BATCH_SIZE, ...], labelsPh: y_train[i:i + BATCH_SIZE]}
        __, y_pred_train, errorExample= session.run([train_step, y_pred, error], feed_dict=feed_dict)
        nStep+=1
        labels_train.append( y_train[i:i + BATCH_SIZE])
        preds_train.append( y_pred_train)
    #EVALUATE VALIDATION DATA
    # No train_step in this run call, so weights are not updated here.
    for i in range(0, x_validation.shape[0], BATCH_SIZE):
        feed_dict= {inputPh: x_validation[i:i + BATCH_SIZE, ...], labelsPh: y_validation[i:i + BATCH_SIZE]}
        y_pred_val, errorVal= session.run([y_pred, error], feed_dict=feed_dict)
        labels_val.append( y_validation[i:i + BATCH_SIZE])
        preds_val.append(y_pred_val)
    # Train R^2 is computed from predictions collected *during* training,
    # so it reflects a model that changed over the epoch.
    preds_train= np.concatenate(preds_train)
    labels_train= np.concatenate(labels_train)
    train_r2= coefficient_of_determination(labels_train, preds_train)
    preds_val= np.concatenate(preds_val)
    labels_val= np.concatenate(labels_val)
    val_r2= coefficient_of_determination(labels_val, preds_val)
    print("Epoch %d. train_r2 %f val_r2 %f"%(nEpoch, train_r2, val_r2))
#REPORT PERFORMANCE ON TEST SET
labels_test= []
preds_test= []
for i in range(0, x_test.shape[0], BATCH_SIZE):
    feed_dict= {inputPh: x_test[i:i + BATCH_SIZE, ...], labelsPh: y_test[i:i + BATCH_SIZE]}
    y_pred_test, errorTest= session.run([y_pred, error], feed_dict=feed_dict)
    labels_test.append( y_test[i:i + BATCH_SIZE])
    preds_test.append(y_pred_test)
preds_test= np.concatenate(preds_test)
labels_test= np.concatenate(labels_test)
test_r2= coefficient_of_determination(labels_test, preds_test)
print("END. test_r2 %f"%(test_r2))
session.close()
| [
"rubensanchezgarc@gmail.com"
] | rubensanchezgarc@gmail.com |
f988587e634f0ff1e7eafdfc1516557550cfc51e | 16c4d625ad9e945471a2a267e9992c7e9260214f | /project/settings/base.py | c4e2c96e358bdf8bb8c54a749f025709c08eb5ae | [
"BSD-2-Clause"
] | permissive | andywar65/rp_repo | 8cea1c81533250b49a4036fb9b0ff6e93a0dde66 | 726c1426d738b962cabeabd8995aa35767df0c41 | refs/heads/master | 2023-05-26T13:47:48.329624 | 2021-06-05T08:35:05 | 2021-06-05T08:35:05 | 255,056,987 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,238 | py | """
Django settings for project_repo project.
Generated by 'django-admin startproject' using Django 3.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
"""WARNING:
Commits to this file may not be cherry-picked by branches"""
import os
import json
from django.core.exceptions import ImproperlyConfigured
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
BASE_DIR = os.path.dirname(PROJECT_DIR)
APPLICATION_DIR = os.path.dirname(BASE_DIR)
with open(os.path.join(APPLICATION_DIR, 'secrets.json')) as f:
secrets = json.loads(f.read())
def get_secret(setting, secrets=secrets):
    """Look up *setting* in the secrets mapping loaded from secrets.json.

    Raises ImproperlyConfigured with an explicit message when the key is
    absent.  (Pattern borrowed from "Two Scoops of Django".)
    """
    try:
        return secrets[setting]
    except KeyError:
        raise ImproperlyConfigured(
            'Set the {0} environment variable'.format(setting)
        )
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# Application definition
INSTALLED_APPS = [
'filebrowser',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.postgres',
'users.apps.UsersConfig',
'pages.apps.PagesConfig',
'blog.apps.BlogConfig',
'cronache.apps.CronacheConfig',
'criterium.apps.CriteriumConfig',
'direzione.apps.DirezioneConfig',
'wordpress.apps.WordpressConfig',
'streamblocks',
'streamfield',
'captcha',
'taggit',
'crispy_forms',
'treebeard',
'private_storage',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(PROJECT_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'project.processors.get_global_settings',
],
},
},
]
CRISPY_TEMPLATE_PACK = 'bootstrap4'
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
STATICFILES_DIRS = [
os.path.join(PROJECT_DIR, "static"),
]
TAGGIT_CASE_INSENSITIVE = True
FILEBROWSER_VERSIONS = {
'admin_thumbnail': {'verbose_name': 'Admin Thumbnail', 'width': 60, 'height': 60, 'opts': 'crop'},
'thumbnail': {'verbose_name': 'Thumbnail (1 col)', 'width': 60, 'height': 60, 'opts': 'crop'},
'small': {'verbose_name': 'Small (2 col)', 'width': 140, 'height': '', 'opts': ''},
'medium': {'verbose_name': 'Medium (4col )', 'width': 300, 'height': '', 'opts': ''},
'big': {'verbose_name': 'Big (6 col)', 'width': 460, 'height': '', 'opts': ''},
'large': {'verbose_name': 'Large (8 col)', 'width': 680, 'height': '', 'opts': ''},
'wide_landscape': {'verbose_name': 'Orizzontale', 'width': 2048, 'height': 1024, 'opts': 'crop'},
'landscape': {'verbose_name': 'Orizzontale', 'width': 1280, 'height': 720, 'opts': 'crop'},
'portrait': {'verbose_name': 'Verticale', 'width': 768, 'height': 1024, 'opts': 'crop'},
'square': {'verbose_name': 'Quadrato', 'width': 768, 'height': 768, 'opts': 'crop'},
}
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = get_secret('LANGUAGE_CODE')#'en-us'
TIME_ZONE = get_secret('TIME_ZONE')
USE_I18N = True
USE_L10N = True
USE_TZ = True
AUTH_USER_MODEL = 'users.User'
#This stuff has nothing to do with django.site
WEBSITE_NAME = get_secret('WEBSITE_NAME')
WEBSITE_ACRO = get_secret('WEBSITE_ACRO')
#footer external links
#make your own, add them in project.processors.get_global_settings
FB_LINK = get_secret('FB_LINK')
INSTA_LINK = get_secret('INSTA_LINK')
TWIT_LINK = get_secret('TWIT_LINK')
IN_LINK = get_secret('IN_LINK')
GITHUB_LINK = get_secret('GITHUB_LINK')
EXT_LINK = get_secret('EXT_LINK')
| [
"andy.war1965@gmail.com"
] | andy.war1965@gmail.com |
85fcd1aadc483df9977249471836ba9386a69e6e | 4b7e282fe480415f5d52c0fc0429f144156190fe | /google/ads/googleads/v8/services/services/location_view_service/client.py | f89bb5eb1b6a50ff70914ddcca823a87788c50b1 | [
"Apache-2.0"
] | permissive | Z2Xsoft/google-ads-python | c4750357bb19da91bb3b6bf2fa84bef9d2df36d3 | 1779d52a0446c8afb2437b0a9e103dcb849f5590 | refs/heads/main | 2023-08-18T15:22:17.840364 | 2021-09-26T04:08:53 | 2021-09-26T04:08:53 | 410,444,398 | 0 | 0 | Apache-2.0 | 2021-09-26T04:08:53 | 2021-09-26T03:55:38 | null | UTF-8 | Python | false | false | 18,117 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.ads.googleads.v8.resources.types import location_view
from google.ads.googleads.v8.services.types import location_view_service
from .transports.base import LocationViewServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import LocationViewServiceGrpcTransport
class LocationViewServiceClientMeta(type):
    """Metaclass for the LocationViewService client.
    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """
    # NOTE(review): this looks like GAPIC-generated code -- prefer
    # regenerating over hand edits.
    # Maps transport label -> transport class; insertion order makes
    # "grpc" the default returned by get_transport_class(None).
    _transport_registry = (
        OrderedDict()
    )  # type: Dict[str, Type[LocationViewServiceTransport]]
    _transport_registry["grpc"] = LocationViewServiceGrpcTransport
    def get_transport_class(
        cls, label: Optional[str] = None,
    ) -> Type[LocationViewServiceTransport]:
        """Return an appropriate transport class.
        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.
        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]
        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))
class LocationViewServiceClient(metaclass=LocationViewServiceClientMeta):
"""Service to fetch location views."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Convert api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
LocationViewServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
LocationViewServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> LocationViewServiceTransport:
"""Return the transport used by the client instance.
Returns:
LocationViewServiceTransport: The transport used by the client instance.
"""
return self._transport
@staticmethod
def location_view_path(
customer_id: str, campaign_id: str, criterion_id: str,
) -> str:
"""Return a fully-qualified location_view string."""
return "customers/{customer_id}/locationViews/{campaign_id}~{criterion_id}".format(
customer_id=customer_id,
campaign_id=campaign_id,
criterion_id=criterion_id,
)
@staticmethod
def parse_location_view_path(path: str) -> Dict[str, str]:
"""Parse a location_view path into its component segments."""
m = re.match(
r"^customers/(?P<customer_id>.+?)/locationViews/(?P<campaign_id>.+?)~(?P<criterion_id>.+?)$",
path,
)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Return a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Return a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Return a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Return a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Return a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
    def __init__(
        self,
        *,
        credentials: Optional[ga_credentials.Credentials] = None,
        transport: Union[str, LocationViewServiceTransport, None] = None,
        client_options: Optional[client_options_lib.ClientOptions] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiate the location view service client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, ~.LocationViewServiceTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize client_options: accept a plain dict or None in addition
        # to a ClientOptions instance.
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()

        # Create SSL credentials for mutual TLS if needed.
        # Client certificates are only used when the environment variable
        # GOOGLE_API_USE_CLIENT_CERTIFICATE is the string "true".
        use_client_cert = bool(
            util.strtobool(
                os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
            )
        )

        ssl_credentials = None
        is_mtls = False
        if use_client_cert:
            if client_options.client_cert_source:
                # An explicit cert source wins over the default SSL creds.
                import grpc  # type: ignore

                cert, key = client_options.client_cert_source()
                ssl_credentials = grpc.ssl_channel_credentials(
                    certificate_chain=cert, private_key=key
                )
                is_mtls = True
            else:
                creds = SslCredentials()
                is_mtls = creds.is_mtls
                ssl_credentials = creds.ssl_credentials if is_mtls else None

        # Figure out which api endpoint to use.
        # An explicit client_options.api_endpoint always takes precedence;
        # otherwise GOOGLE_API_USE_MTLS_ENDPOINT (never/always/auto) decides.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        else:
            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
            if use_mtls_env == "never":
                api_endpoint = self.DEFAULT_ENDPOINT
            elif use_mtls_env == "always":
                api_endpoint = self.DEFAULT_MTLS_ENDPOINT
            elif use_mtls_env == "auto":
                api_endpoint = (
                    self.DEFAULT_MTLS_ENDPOINT
                    if is_mtls
                    else self.DEFAULT_ENDPOINT
                )
            else:
                raise MutualTLSChannelError(
                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
                )

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, LocationViewServiceTransport):
            # transport is a LocationViewServiceTransport instance.
            if credentials:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            self._transport = transport
        elif isinstance(transport, str):
            # NOTE(review): this branch ignores the computed ``api_endpoint``
            # and ``ssl_credentials`` and always connects to DEFAULT_ENDPOINT;
            # looks deliberate in this generated surface, but worth confirming.
            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials, host=self.DEFAULT_ENDPOINT
            )
        else:
            self._transport = LocationViewServiceGrpcTransport(
                credentials=credentials,
                host=api_endpoint,
                ssl_channel_credentials=ssl_credentials,
                client_info=client_info,
            )
    def get_location_view(
        self,
        request: location_view_service.GetLocationViewRequest = None,
        *,
        resource_name: str = None,
        retry: retries.Retry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> location_view.LocationView:
        r"""Returns the requested location view in full detail.

        List of thrown errors: `AuthenticationError <>`__
        `AuthorizationError <>`__ `HeaderError <>`__
        `InternalError <>`__ `QuotaError <>`__ `RequestError <>`__

        Args:
            request (:class:`google.ads.googleads.v8.services.types.GetLocationViewRequest`):
                The request object. Request message for
                [LocationViewService.GetLocationView][google.ads.googleads.v8.services.LocationViewService.GetLocationView].
            resource_name (:class:`str`):
                Required. The resource name of the
                location view to fetch.
                This corresponds to the ``resource_name`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.ads.googleads.v8.resources.types.LocationView:
                A location view summarizes the
                performance of campaigns by Location
                criteria.
        """
        # Create or coerce a protobuf request object.
        # Sanity check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        # NOTE(review): ``any([resource_name])`` treats a falsy value such as
        # "" the same as "not provided", so an empty-string resource_name
        # passed alongside ``request`` is not rejected here.
        if request is not None and any([resource_name]):
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # Minor optimization to avoid making a copy if the user passes
        # in a location_view_service.GetLocationViewRequest.
        # There's no risk of modifying the input as we've already verified
        # there are no flattened fields.
        if not isinstance(
            request, location_view_service.GetLocationViewRequest
        ):
            request = location_view_service.GetLocationViewRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if resource_name is not None:
                request.resource_name = resource_name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[
            self._transport.get_location_view
        ]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("resource_name", request.resource_name),)
            ),
        )

        # Send the request.
        response = rpc(
            request, retry=retry, timeout=timeout, metadata=metadata,
        )

        # Done; return the response.
        return response
__all__ = ("LocationViewServiceClient",)
| [
"noreply@github.com"
] | Z2Xsoft.noreply@github.com |
96abcbb8b4d9b6e80e79f4e640727cf3898dbd82 | a3b306df800059a5b74975793251a28b8a5f49c7 | /Graphs/LX-2/molecule_otsu = False/BioImageXD-1.0/ITK/lib/InsightToolkit/WrapITK/lib/itkScalarConnectedComponentImageFilterPython.py | 90e57b3895923a42a4f17809e6ba7febb18a6a5b | [] | no_license | giacomo21/Image-analysis | dc17ba2b6eb53f48963fad931568576fda4e1349 | ea8bafa073de5090bd8f83fb4f5ca16669d0211f | refs/heads/master | 2016-09-06T21:42:13.530256 | 2013-07-22T09:35:56 | 2013-07-22T09:35:56 | 11,384,784 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 75,342 | py | # This file was automatically generated by SWIG (http://www.swig.org).
# Version 1.3.40
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info
# Py2/Py3 compatibility shim: pick an ``instancemethod`` factory.  On
# Python 3 SWIG's own C helper is used; on Python 2 the stdlib ``new``
# module provides the equivalent.
if version_info >= (3,0,0):
    new_instancemethod = lambda func, inst, cls: _itkScalarConnectedComponentImageFilterPython.SWIG_PyInstanceMethod_New(func)
else:
    from new import instancemethod as new_instancemethod
if version_info >= (2,6,0):
    def swig_import_helper():
        # Locate the compiled extension module that lives next to this
        # wrapper and load it explicitly; falls back to a plain import if
        # ``imp.find_module`` cannot see it.  Standard SWIG bootstrap.
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_itkScalarConnectedComponentImageFilterPython', [dirname(__file__)])
        except ImportError:
            import _itkScalarConnectedComponentImageFilterPython
            return _itkScalarConnectedComponentImageFilterPython
        if fp is not None:
            try:
                _mod = imp.load_module('_itkScalarConnectedComponentImageFilterPython', fp, pathname, description)
            finally:
                # Always close the file handle, even if loading fails.
                fp.close()
            return _mod
    _itkScalarConnectedComponentImageFilterPython = swig_import_helper()
    del swig_import_helper
else:
    import _itkScalarConnectedComponentImageFilterPython
del version_info
# ``property`` appeared in Python 2.2; on older interpreters SWIG simply
# skips property support rather than failing at import time.
try:
    _swig_property = property
except NameError:
    pass # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "thisown"): return self.this.own(value)
if (name == "this"):
if type(value).__name__ == 'SwigPyObject':
self.__dict__[name] = value
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static) or hasattr(self,name):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
    # Dynamic variant: always allows adding new attributes (static=0).
    return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
if (name == "thisown"): return self.this.own()
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError(name)
def _swig_repr(self):
try: strthis = "proxy of " + self.this.__repr__()
except: strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Detect new-style class support; on very old interpreters fall back to a
# plain ``_object`` base class and record the capability in ``_newclass``.
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
def _swig_setattr_nondynamic_method(set):
def set_attr(self,name,value):
if (name == "thisown"): return self.this.own(value)
if hasattr(self,name) or (name == "this"):
set(self,name,value)
else:
raise AttributeError("You cannot add attributes to %s" % self)
return set_attr
import ITKCommonBasePython
import itkEventObjectsPython
import pyBasePython
import itkConnectedComponentImageFilterPython
import ITKRegionsPython
import itkSizePython
import itkIndexPython
import itkOffsetPython
import itkImageToImageFilterAPython
import itkImagePython
import itkFixedArrayPython
import itkCovariantVectorPython
import vnl_vectorPython
import vcl_complexPython
import vnl_matrixPython
import itkVectorPython
import vnl_vector_refPython
import itkPointPython
import itkMatrixPython
import vnl_matrix_fixedPython
import itkRGBAPixelPython
import itkSymmetricSecondRankTensorPython
import itkRGBPixelPython
import itkImageSourcePython
import itkVectorImagePython
import itkVariableLengthVectorPython
import itkImageToImageFilterBPython
# Convenience factories emitted by SWIG: each returns a new filter object
# for one (input, output) image-type instantiation by delegating to the
# corresponding proxy class's ``New`` helper defined later in this module.
def itkScalarConnectedComponentImageFilterIUS3IUS3_New():
    return itkScalarConnectedComponentImageFilterIUS3IUS3.New()

def itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_New():
    return itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.New()

def itkScalarConnectedComponentImageFilterIUS2IUS2_New():
    return itkScalarConnectedComponentImageFilterIUS2IUS2.New()

def itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_New():
    return itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.New()

def itkScalarConnectedComponentImageFilterIUL3IUL3_New():
    return itkScalarConnectedComponentImageFilterIUL3IUL3.New()

def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_New():
    return itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.New()

def itkScalarConnectedComponentImageFilterIUL2IUL2_New():
    return itkScalarConnectedComponentImageFilterIUL2IUL2.New()

def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_New():
    return itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.New()

def itkScalarConnectedComponentImageFilterIUC3IUC3_New():
    return itkScalarConnectedComponentImageFilterIUC3IUC3.New()

def itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_New():
    return itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.New()

def itkScalarConnectedComponentImageFilterIUC2IUC2_New():
    return itkScalarConnectedComponentImageFilterIUC2IUC2.New()

def itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_New():
    return itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.New()
# SWIG proxy for the superclass instantiation of ITK's
# ScalarConnectedComponentImageFilter on 2-D unsigned-char images.
# All methods delegate to the compiled extension module; do not edit by hand.
class itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass(itkConnectedComponentImageFilterPython.itkConnectedComponentImageFilterIUC2IUC2):
    """Proxy of C++ itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass class"""
    # 'thisown' tracks whether Python owns (and may delete) the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # Instances must be created through New(); direct construction is blocked.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # Template constants / concept checks exported from the C++ side.
    ImageDimension = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_ImageDimension
    InputImageDimension = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_InputImageDimension
    SameDimensionCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_SameDimensionCheck
    InputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_InputEqualityComparableCheck
    OutputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_OutputEqualityComparableCheck
    OutputConvertibleToUnsignedIntCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_OutputConvertibleToUnsignedIntCheck
    OutputConvertibleToUnsignedLongCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_OutputConvertibleToUnsignedLongCheck
    OutputConvertibleToLongCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_OutputConvertibleToLongCheck
    UnsignedLongConvertibleToOutputCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_UnsignedLongConvertibleToOutputCheck
    OutputIncrementDecrementOperatorsCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_OutputIncrementDecrementOperatorsCheck
    def __New_orig__():
        """__New_orig__()"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass___New_orig__()

    __New_orig__ = staticmethod(__New_orig__)
    def GetFunctor(self, *args):
        """
        GetFunctor(self) -> itk::Functor::SimilarPixelsFunctor<(unsigned char)>
        GetFunctor(self) -> itk::Functor::SimilarPixelsFunctor<(unsigned char)>
        """
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_GetFunctor(self, *args)

    def SetFunctor(self, *args):
        """SetFunctor(self, itk::Functor::SimilarPixelsFunctor<(unsigned char)> functor)"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_SetFunctor(self, *args)

    __swig_destroy__ = _itkScalarConnectedComponentImageFilterPython.delete_itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass
    def cast(*args):
        """cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args)

    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_GetPointer(self)

    def New(*args, **kargs):
        """New() -> itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass
        Create a new object of the class itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj

    New = staticmethod(New)

# Rebind the wrapped C functions as real instance methods on the proxy.
itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.GetFunctor = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_GetFunctor,None,itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass)
itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.SetFunctor = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_SetFunctor,None,itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass)
itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.GetPointer = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_GetPointer,None,itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass)
# Register the proxy class with the SWIG runtime type system.
itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_swigregister = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_swigregister
itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_swigregister(itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass)

def itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass___New_orig__():
    """itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass___New_orig__()"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass___New_orig__()

def itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args):
    """itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass_cast(*args)
# SWIG proxy for the superclass instantiation of ITK's
# ScalarConnectedComponentImageFilter on 3-D unsigned-char images.
# All methods delegate to the compiled extension module; do not edit by hand.
class itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass(itkConnectedComponentImageFilterPython.itkConnectedComponentImageFilterIUC3IUC3):
    """Proxy of C++ itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass class"""
    # 'thisown' tracks whether Python owns (and may delete) the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # Instances must be created through New(); direct construction is blocked.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # Template constants / concept checks exported from the C++ side.
    ImageDimension = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_ImageDimension
    InputImageDimension = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_InputImageDimension
    SameDimensionCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_SameDimensionCheck
    InputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_InputEqualityComparableCheck
    OutputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_OutputEqualityComparableCheck
    OutputConvertibleToUnsignedIntCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_OutputConvertibleToUnsignedIntCheck
    OutputConvertibleToUnsignedLongCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_OutputConvertibleToUnsignedLongCheck
    OutputConvertibleToLongCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_OutputConvertibleToLongCheck
    UnsignedLongConvertibleToOutputCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_UnsignedLongConvertibleToOutputCheck
    OutputIncrementDecrementOperatorsCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_OutputIncrementDecrementOperatorsCheck
    def __New_orig__():
        """__New_orig__()"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass___New_orig__()

    __New_orig__ = staticmethod(__New_orig__)
    def GetFunctor(self, *args):
        """
        GetFunctor(self) -> itk::Functor::SimilarPixelsFunctor<(unsigned char)>
        GetFunctor(self) -> itk::Functor::SimilarPixelsFunctor<(unsigned char)>
        """
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_GetFunctor(self, *args)

    def SetFunctor(self, *args):
        """SetFunctor(self, itk::Functor::SimilarPixelsFunctor<(unsigned char)> functor)"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_SetFunctor(self, *args)

    __swig_destroy__ = _itkScalarConnectedComponentImageFilterPython.delete_itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass
    def cast(*args):
        """cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args)

    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_GetPointer(self)

    def New(*args, **kargs):
        """New() -> itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass
        Create a new object of the class itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj

    New = staticmethod(New)

# Rebind the wrapped C functions as real instance methods on the proxy.
itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.GetFunctor = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_GetFunctor,None,itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass)
itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.SetFunctor = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_SetFunctor,None,itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass)
itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.GetPointer = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_GetPointer,None,itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass)
# Register the proxy class with the SWIG runtime type system.
itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_swigregister = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_swigregister
itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_swigregister(itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass)

def itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass___New_orig__():
    """itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass___New_orig__()"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass___New_orig__()

def itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args):
    """itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass_cast(*args)
# SWIG proxy for the superclass instantiation of ITK's
# ScalarConnectedComponentImageFilter on 2-D unsigned-long images.
# All methods delegate to the compiled extension module; do not edit by hand.
class itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass(itkConnectedComponentImageFilterPython.itkConnectedComponentImageFilterIUL2IUL2):
    """Proxy of C++ itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass class"""
    # 'thisown' tracks whether Python owns (and may delete) the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # Instances must be created through New(); direct construction is blocked.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # Template constants / concept checks exported from the C++ side.
    ImageDimension = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_ImageDimension
    InputImageDimension = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_InputImageDimension
    SameDimensionCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_SameDimensionCheck
    InputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_InputEqualityComparableCheck
    OutputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_OutputEqualityComparableCheck
    OutputConvertibleToUnsignedIntCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_OutputConvertibleToUnsignedIntCheck
    OutputConvertibleToUnsignedLongCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_OutputConvertibleToUnsignedLongCheck
    OutputConvertibleToLongCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_OutputConvertibleToLongCheck
    UnsignedLongConvertibleToOutputCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_UnsignedLongConvertibleToOutputCheck
    OutputIncrementDecrementOperatorsCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_OutputIncrementDecrementOperatorsCheck
    def __New_orig__():
        """__New_orig__()"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass___New_orig__()

    __New_orig__ = staticmethod(__New_orig__)
    def GetFunctor(self, *args):
        """
        GetFunctor(self) -> itk::Functor::SimilarPixelsFunctor<(unsigned long)>
        GetFunctor(self) -> itk::Functor::SimilarPixelsFunctor<(unsigned long)>
        """
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_GetFunctor(self, *args)

    def SetFunctor(self, *args):
        """SetFunctor(self, itk::Functor::SimilarPixelsFunctor<(unsigned long)> functor)"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_SetFunctor(self, *args)

    __swig_destroy__ = _itkScalarConnectedComponentImageFilterPython.delete_itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass
    def cast(*args):
        """cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)

    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_GetPointer(self)

    def New(*args, **kargs):
        """New() -> itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass
        Create a new object of the class itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
          itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
          obj = itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj

    New = staticmethod(New)

# Rebind the wrapped C functions as real instance methods on the proxy.
itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.GetFunctor = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_GetFunctor,None,itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass)
itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.SetFunctor = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_SetFunctor,None,itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass)
itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass.GetPointer = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_GetPointer,None,itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass)
# Register the proxy class with the SWIG runtime type system.
itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_swigregister = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_swigregister
itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_swigregister(itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass)

def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass___New_orig__():
    """itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass___New_orig__()"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass___New_orig__()

def itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args):
    """itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass_cast(*args)
# SWIG-generated proxy class: every method delegates to the compiled extension
# module _itkScalarConnectedComponentImageFilterPython. "IUL3IUL3" follows ITK
# wrapper naming (input/output images of unsigned long, presumably 3-D --
# verify against the wrapping configuration). Do not edit by hand; regenerate.
class itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass(itkConnectedComponentImageFilterPython.itkConnectedComponentImageFilterIUL3IUL3):
    """Proxy of C++ itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass class"""
    # SWIG ownership flag: whether Python owns (and will delete) the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # Direct construction is disabled; instances come from New()/__New_orig__().
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # Dimension and concept-check constants exported from the C++ wrapper.
    ImageDimension = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_ImageDimension
    InputImageDimension = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_InputImageDimension
    SameDimensionCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_SameDimensionCheck
    InputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_InputEqualityComparableCheck
    OutputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_OutputEqualityComparableCheck
    OutputConvertibleToUnsignedIntCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_OutputConvertibleToUnsignedIntCheck
    OutputConvertibleToUnsignedLongCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_OutputConvertibleToUnsignedLongCheck
    OutputConvertibleToLongCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_OutputConvertibleToLongCheck
    UnsignedLongConvertibleToOutputCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_UnsignedLongConvertibleToOutputCheck
    OutputIncrementDecrementOperatorsCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_OutputIncrementDecrementOperatorsCheck
    # Raw factory: creates the wrapped C++ object without itkTemplate's
    # keyword-argument conveniences (see New below).
    def __New_orig__():
        """__New_orig__()"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def GetFunctor(self, *args):
        """
        GetFunctor(self) -> itk::Functor::SimilarPixelsFunctor<(unsigned long)>
        GetFunctor(self) -> itk::Functor::SimilarPixelsFunctor<(unsigned long)>
        """
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_GetFunctor(self, *args)
    def SetFunctor(self, *args):
        """SetFunctor(self, itk::Functor::SimilarPixelsFunctor<(unsigned long)> functor)"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_SetFunctor(self, *args)
    __swig_destroy__ = _itkScalarConnectedComponentImageFilterPython.delete_itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass
    # Down-cast from a generic itkLightObject to this wrapped type.
    def cast(*args):
        """cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_GetPointer(self)
    # User-facing factory: delegates argument handling to itkTemplate.New,
    # which maps positional args to inputs and keyword args to Set* calls.
    def New(*args, **kargs):
        """New() -> itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass
        Create a new object of the class itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
        itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
        obj = itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.New()
        obj.SetInput( 0, reader.GetOutput() )
        obj.SetThreshold( 10 )
        """
        obj = itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Rebind proxy methods to the C implementations and register the proxy type
# with SWIG's runtime type system.
itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.GetFunctor = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_GetFunctor,None,itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass)
itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.SetFunctor = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_SetFunctor,None,itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass)
itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass.GetPointer = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_GetPointer,None,itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass)
itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_swigregister = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_swigregister
itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_swigregister(itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass)
# Module-level convenience wrappers around the static C functions.
def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass___New_orig__():
    """itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass___New_orig__()"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass___New_orig__()
def itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args):
    """itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass_cast(*args)
# SWIG-generated proxy class: every method delegates to the compiled extension
# module _itkScalarConnectedComponentImageFilterPython. "IUS2IUS2" follows ITK
# wrapper naming (input/output images of unsigned short, presumably 2-D --
# verify against the wrapping configuration). Do not edit by hand; regenerate.
class itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass(itkConnectedComponentImageFilterPython.itkConnectedComponentImageFilterIUS2IUS2):
    """Proxy of C++ itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass class"""
    # SWIG ownership flag: whether Python owns (and will delete) the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # Direct construction is disabled; instances come from New()/__New_orig__().
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # Dimension and concept-check constants exported from the C++ wrapper.
    ImageDimension = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_ImageDimension
    InputImageDimension = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_InputImageDimension
    SameDimensionCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_SameDimensionCheck
    InputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_InputEqualityComparableCheck
    OutputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_OutputEqualityComparableCheck
    OutputConvertibleToUnsignedIntCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_OutputConvertibleToUnsignedIntCheck
    OutputConvertibleToUnsignedLongCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_OutputConvertibleToUnsignedLongCheck
    OutputConvertibleToLongCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_OutputConvertibleToLongCheck
    UnsignedLongConvertibleToOutputCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_UnsignedLongConvertibleToOutputCheck
    OutputIncrementDecrementOperatorsCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_OutputIncrementDecrementOperatorsCheck
    # Raw factory: creates the wrapped C++ object without itkTemplate's
    # keyword-argument conveniences (see New below).
    def __New_orig__():
        """__New_orig__()"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def GetFunctor(self, *args):
        """
        GetFunctor(self) -> itk::Functor::SimilarPixelsFunctor<(unsigned short)>
        GetFunctor(self) -> itk::Functor::SimilarPixelsFunctor<(unsigned short)>
        """
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_GetFunctor(self, *args)
    def SetFunctor(self, *args):
        """SetFunctor(self, itk::Functor::SimilarPixelsFunctor<(unsigned short)> functor)"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_SetFunctor(self, *args)
    __swig_destroy__ = _itkScalarConnectedComponentImageFilterPython.delete_itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass
    # Down-cast from a generic itkLightObject to this wrapped type.
    def cast(*args):
        """cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_GetPointer(self)
    # User-facing factory: delegates argument handling to itkTemplate.New,
    # which maps positional args to inputs and keyword args to Set* calls.
    def New(*args, **kargs):
        """New() -> itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass
        Create a new object of the class itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
        itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
        obj = itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.New()
        obj.SetInput( 0, reader.GetOutput() )
        obj.SetThreshold( 10 )
        """
        obj = itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Rebind proxy methods to the C implementations and register the proxy type
# with SWIG's runtime type system.
itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.GetFunctor = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_GetFunctor,None,itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass)
itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.SetFunctor = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_SetFunctor,None,itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass)
itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.GetPointer = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_GetPointer,None,itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass)
itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_swigregister = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_swigregister
itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_swigregister(itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass)
# Module-level convenience wrappers around the static C functions.
def itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass___New_orig__():
    """itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass___New_orig__()"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass___New_orig__()
def itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args):
    """itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass_cast(*args)
# SWIG-generated proxy class: every method delegates to the compiled extension
# module _itkScalarConnectedComponentImageFilterPython. "IUS3IUS3" follows ITK
# wrapper naming (input/output images of unsigned short, presumably 3-D --
# verify against the wrapping configuration). Do not edit by hand; regenerate.
class itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass(itkConnectedComponentImageFilterPython.itkConnectedComponentImageFilterIUS3IUS3):
    """Proxy of C++ itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass class"""
    # SWIG ownership flag: whether Python owns (and will delete) the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # Direct construction is disabled; instances come from New()/__New_orig__().
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # Dimension and concept-check constants exported from the C++ wrapper.
    ImageDimension = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_ImageDimension
    InputImageDimension = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_InputImageDimension
    SameDimensionCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_SameDimensionCheck
    InputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_InputEqualityComparableCheck
    OutputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_OutputEqualityComparableCheck
    OutputConvertibleToUnsignedIntCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_OutputConvertibleToUnsignedIntCheck
    OutputConvertibleToUnsignedLongCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_OutputConvertibleToUnsignedLongCheck
    OutputConvertibleToLongCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_OutputConvertibleToLongCheck
    UnsignedLongConvertibleToOutputCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_UnsignedLongConvertibleToOutputCheck
    OutputIncrementDecrementOperatorsCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_OutputIncrementDecrementOperatorsCheck
    # Raw factory: creates the wrapped C++ object without itkTemplate's
    # keyword-argument conveniences (see New below).
    def __New_orig__():
        """__New_orig__()"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def GetFunctor(self, *args):
        """
        GetFunctor(self) -> itk::Functor::SimilarPixelsFunctor<(unsigned short)>
        GetFunctor(self) -> itk::Functor::SimilarPixelsFunctor<(unsigned short)>
        """
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_GetFunctor(self, *args)
    def SetFunctor(self, *args):
        """SetFunctor(self, itk::Functor::SimilarPixelsFunctor<(unsigned short)> functor)"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_SetFunctor(self, *args)
    __swig_destroy__ = _itkScalarConnectedComponentImageFilterPython.delete_itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass
    # Down-cast from a generic itkLightObject to this wrapped type.
    def cast(*args):
        """cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_GetPointer(self)
    # User-facing factory: delegates argument handling to itkTemplate.New,
    # which maps positional args to inputs and keyword args to Set* calls.
    def New(*args, **kargs):
        """New() -> itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass
        Create a new object of the class itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
        itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
        obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.New()
        obj.SetInput( 0, reader.GetOutput() )
        obj.SetThreshold( 10 )
        """
        obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Rebind proxy methods to the C implementations and register the proxy type
# with SWIG's runtime type system.
itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.GetFunctor = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_GetFunctor,None,itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass)
itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.SetFunctor = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_SetFunctor,None,itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass)
itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.GetPointer = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_GetPointer,None,itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass)
itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_swigregister = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_swigregister
itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_swigregister(itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass)
# Module-level convenience wrappers around the static C functions.
def itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass___New_orig__():
    """itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass___New_orig__()"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass___New_orig__()
def itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args):
    """itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass_cast(*args)
# SWIG-generated proxy for the concrete wrapped filter instantiation
# "IUC2IUC2" (ITK naming: input/output images of unsigned char, presumably
# 2-D -- verify against the wrapping configuration). Adds the
# DistanceThreshold accessors on top of the _Superclass proxy. All behavior
# is delegated to _itkScalarConnectedComponentImageFilterPython.
class itkScalarConnectedComponentImageFilterIUC2IUC2(itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass):
    """Proxy of C++ itkScalarConnectedComponentImageFilterIUC2IUC2 class"""
    # SWIG ownership flag: whether Python owns (and will delete) the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # Direct construction is disabled; instances come from New()/__New_orig__().
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # Concept-check constants exported from the C++ wrapper.
    InputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_InputEqualityComparableCheck
    OutputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_OutputEqualityComparableCheck
    MaskEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_MaskEqualityComparableCheck
    OutputIncrementDecrementOperatorsCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_OutputIncrementDecrementOperatorsCheck
    # Raw factory: creates the wrapped C++ object without itkTemplate's
    # keyword-argument conveniences (see New below).
    def __New_orig__():
        """__New_orig__()"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def SetDistanceThreshold(self, *args):
        """SetDistanceThreshold(self, unsigned char thresh)"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_SetDistanceThreshold(self, *args)
    def GetDistanceThreshold(self):
        """GetDistanceThreshold(self) -> unsigned char"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_GetDistanceThreshold(self)
    __swig_destroy__ = _itkScalarConnectedComponentImageFilterPython.delete_itkScalarConnectedComponentImageFilterIUC2IUC2
    # Down-cast from a generic itkLightObject to this wrapped type.
    def cast(*args):
        """cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUC2IUC2"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkScalarConnectedComponentImageFilterIUC2IUC2"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_GetPointer(self)
    # User-facing factory: delegates argument handling to itkTemplate.New,
    # which maps positional args to inputs and keyword args to Set* calls.
    def New(*args, **kargs):
        """New() -> itkScalarConnectedComponentImageFilterIUC2IUC2
        Create a new object of the class itkScalarConnectedComponentImageFilterIUC2IUC2 and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
        itkScalarConnectedComponentImageFilterIUC2IUC2.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
        obj = itkScalarConnectedComponentImageFilterIUC2IUC2.New()
        obj.SetInput( 0, reader.GetOutput() )
        obj.SetThreshold( 10 )
        """
        obj = itkScalarConnectedComponentImageFilterIUC2IUC2.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Rebind proxy methods to the C implementations and register the proxy type
# with SWIG's runtime type system.
itkScalarConnectedComponentImageFilterIUC2IUC2.SetDistanceThreshold = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_SetDistanceThreshold,None,itkScalarConnectedComponentImageFilterIUC2IUC2)
itkScalarConnectedComponentImageFilterIUC2IUC2.GetDistanceThreshold = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_GetDistanceThreshold,None,itkScalarConnectedComponentImageFilterIUC2IUC2)
itkScalarConnectedComponentImageFilterIUC2IUC2.GetPointer = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_GetPointer,None,itkScalarConnectedComponentImageFilterIUC2IUC2)
itkScalarConnectedComponentImageFilterIUC2IUC2_swigregister = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_swigregister
itkScalarConnectedComponentImageFilterIUC2IUC2_swigregister(itkScalarConnectedComponentImageFilterIUC2IUC2)
# Module-level convenience wrappers around the static C functions.
def itkScalarConnectedComponentImageFilterIUC2IUC2___New_orig__():
    """itkScalarConnectedComponentImageFilterIUC2IUC2___New_orig__()"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2___New_orig__()
def itkScalarConnectedComponentImageFilterIUC2IUC2_cast(*args):
    """itkScalarConnectedComponentImageFilterIUC2IUC2_cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUC2IUC2"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC2IUC2_cast(*args)
# SWIG-generated proxy for the concrete wrapped filter instantiation
# "IUC3IUC3" (ITK naming: input/output images of unsigned char, presumably
# 3-D -- verify against the wrapping configuration). Adds the
# DistanceThreshold accessors on top of the _Superclass proxy. All behavior
# is delegated to _itkScalarConnectedComponentImageFilterPython.
class itkScalarConnectedComponentImageFilterIUC3IUC3(itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass):
    """Proxy of C++ itkScalarConnectedComponentImageFilterIUC3IUC3 class"""
    # SWIG ownership flag: whether Python owns (and will delete) the C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # Direct construction is disabled; instances come from New()/__New_orig__().
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # Concept-check constants exported from the C++ wrapper.
    InputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_InputEqualityComparableCheck
    OutputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_OutputEqualityComparableCheck
    MaskEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_MaskEqualityComparableCheck
    OutputIncrementDecrementOperatorsCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_OutputIncrementDecrementOperatorsCheck
    # Raw factory: creates the wrapped C++ object without itkTemplate's
    # keyword-argument conveniences (see New below).
    def __New_orig__():
        """__New_orig__()"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def SetDistanceThreshold(self, *args):
        """SetDistanceThreshold(self, unsigned char thresh)"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_SetDistanceThreshold(self, *args)
    def GetDistanceThreshold(self):
        """GetDistanceThreshold(self) -> unsigned char"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_GetDistanceThreshold(self)
    __swig_destroy__ = _itkScalarConnectedComponentImageFilterPython.delete_itkScalarConnectedComponentImageFilterIUC3IUC3
    # Down-cast from a generic itkLightObject to this wrapped type.
    def cast(*args):
        """cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUC3IUC3"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkScalarConnectedComponentImageFilterIUC3IUC3"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_GetPointer(self)
    # User-facing factory: delegates argument handling to itkTemplate.New,
    # which maps positional args to inputs and keyword args to Set* calls.
    def New(*args, **kargs):
        """New() -> itkScalarConnectedComponentImageFilterIUC3IUC3
        Create a new object of the class itkScalarConnectedComponentImageFilterIUC3IUC3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.
        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.
        The named parameters are used by calling the method with the same name prefixed by 'Set'.
        Ex:
        itkScalarConnectedComponentImageFilterIUC3IUC3.New( reader, Threshold=10 )
        is (most of the time) equivalent to:
        obj = itkScalarConnectedComponentImageFilterIUC3IUC3.New()
        obj.SetInput( 0, reader.GetOutput() )
        obj.SetThreshold( 10 )
        """
        obj = itkScalarConnectedComponentImageFilterIUC3IUC3.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Rebind proxy methods to the C implementations and register the proxy type
# with SWIG's runtime type system.
itkScalarConnectedComponentImageFilterIUC3IUC3.SetDistanceThreshold = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_SetDistanceThreshold,None,itkScalarConnectedComponentImageFilterIUC3IUC3)
itkScalarConnectedComponentImageFilterIUC3IUC3.GetDistanceThreshold = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_GetDistanceThreshold,None,itkScalarConnectedComponentImageFilterIUC3IUC3)
itkScalarConnectedComponentImageFilterIUC3IUC3.GetPointer = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_GetPointer,None,itkScalarConnectedComponentImageFilterIUC3IUC3)
itkScalarConnectedComponentImageFilterIUC3IUC3_swigregister = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_swigregister
itkScalarConnectedComponentImageFilterIUC3IUC3_swigregister(itkScalarConnectedComponentImageFilterIUC3IUC3)
# Module-level convenience wrappers around the static C functions.
def itkScalarConnectedComponentImageFilterIUC3IUC3___New_orig__():
    """itkScalarConnectedComponentImageFilterIUC3IUC3___New_orig__()"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3___New_orig__()
def itkScalarConnectedComponentImageFilterIUC3IUC3_cast(*args):
    """itkScalarConnectedComponentImageFilterIUC3IUC3_cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUC3IUC3"""
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUC3IUC3_cast(*args)
class itkScalarConnectedComponentImageFilterIUL2IUL2(itkScalarConnectedComponentImageFilterIUL2IUL2_Superclass):
"""Proxy of C++ itkScalarConnectedComponentImageFilterIUL2IUL2 class"""
thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
__repr__ = _swig_repr
InputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_InputEqualityComparableCheck
OutputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_OutputEqualityComparableCheck
MaskEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_MaskEqualityComparableCheck
OutputIncrementDecrementOperatorsCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_OutputIncrementDecrementOperatorsCheck
def __New_orig__():
"""__New_orig__()"""
return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2___New_orig__()
__New_orig__ = staticmethod(__New_orig__)
def SetDistanceThreshold(self, *args):
"""SetDistanceThreshold(self, unsigned long thresh)"""
return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_SetDistanceThreshold(self, *args)
def GetDistanceThreshold(self):
"""GetDistanceThreshold(self) -> unsigned long"""
return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_GetDistanceThreshold(self)
__swig_destroy__ = _itkScalarConnectedComponentImageFilterPython.delete_itkScalarConnectedComponentImageFilterIUL2IUL2
def cast(*args):
"""cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUL2IUL2"""
return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_cast(*args)
cast = staticmethod(cast)
def GetPointer(self):
"""GetPointer(self) -> itkScalarConnectedComponentImageFilterIUL2IUL2"""
return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_GetPointer(self)
def New(*args, **kargs):
"""New() -> itkScalarConnectedComponentImageFilterIUL2IUL2
Create a new object of the class itkScalarConnectedComponentImageFilterIUL2IUL2 and set the input and the parameters if some
named or non-named arguments are passed to that method.
New() tries to assign all the non named parameters to the input of the new objects - the
first non named parameter in the first input, etc.
The named parameters are used by calling the method with the same name prefixed by 'Set'.
Ex:
itkScalarConnectedComponentImageFilterIUL2IUL2.New( reader, Threshold=10 )
is (most of the time) equivalent to:
obj = itkScalarConnectedComponentImageFilterIUL2IUL2.New()
obj.SetInput( 0, reader.GetOutput() )
obj.SetThreshold( 10 )
"""
obj = itkScalarConnectedComponentImageFilterIUL2IUL2.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj
New = staticmethod(New)
itkScalarConnectedComponentImageFilterIUL2IUL2.SetDistanceThreshold = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_SetDistanceThreshold,None,itkScalarConnectedComponentImageFilterIUL2IUL2)
itkScalarConnectedComponentImageFilterIUL2IUL2.GetDistanceThreshold = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_GetDistanceThreshold,None,itkScalarConnectedComponentImageFilterIUL2IUL2)
itkScalarConnectedComponentImageFilterIUL2IUL2.GetPointer = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_GetPointer,None,itkScalarConnectedComponentImageFilterIUL2IUL2)
itkScalarConnectedComponentImageFilterIUL2IUL2_swigregister = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_swigregister
itkScalarConnectedComponentImageFilterIUL2IUL2_swigregister(itkScalarConnectedComponentImageFilterIUL2IUL2)
def itkScalarConnectedComponentImageFilterIUL2IUL2___New_orig__():
    """itkScalarConnectedComponentImageFilterIUL2IUL2___New_orig__()

    Module-level forwarder to the C-extension constructor (SWIG-generated).
    """
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2___New_orig__()
def itkScalarConnectedComponentImageFilterIUL2IUL2_cast(*args):
    """itkScalarConnectedComponentImageFilterIUL2IUL2_cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUL2IUL2

    Module-level forwarder to the C-extension downcast helper (SWIG-generated).
    """
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL2IUL2_cast(*args)
class itkScalarConnectedComponentImageFilterIUL3IUL3(itkScalarConnectedComponentImageFilterIUL3IUL3_Superclass):
    """Proxy of C++ itkScalarConnectedComponentImageFilterIUL3IUL3 class"""
    # SWIG-generated proxy: every method below forwards to the C extension
    # module _itkScalarConnectedComponentImageFilterPython.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # Direct construction is disabled; instances must be created via New().
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # ITK concept-checking constants exported by the wrapped C++ filter.
    InputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_InputEqualityComparableCheck
    OutputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_OutputEqualityComparableCheck
    MaskEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_MaskEqualityComparableCheck
    OutputIncrementDecrementOperatorsCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_OutputIncrementDecrementOperatorsCheck
    def __New_orig__():
        """__New_orig__()"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def SetDistanceThreshold(self, *args):
        """SetDistanceThreshold(self, unsigned long thresh)"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_SetDistanceThreshold(self, *args)
    def GetDistanceThreshold(self):
        """GetDistanceThreshold(self) -> unsigned long"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_GetDistanceThreshold(self)
    # Destructor hook used by the SWIG runtime when the proxy is collected.
    __swig_destroy__ = _itkScalarConnectedComponentImageFilterPython.delete_itkScalarConnectedComponentImageFilterIUL3IUL3
    def cast(*args):
        """cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUL3IUL3"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkScalarConnectedComponentImageFilterIUL3IUL3"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_GetPointer(self)
    # SWIG-generated factory: forwards argument wiring to itkTemplate.New.
    def New(*args, **kargs):
        """New() -> itkScalarConnectedComponentImageFilterIUL3IUL3

        Create a new object of the class itkScalarConnectedComponentImageFilterIUL3IUL3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.

        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.

        The named parameters are used by calling the method with the same name prefixed by 'Set'.

        Ex:

          itkScalarConnectedComponentImageFilterIUL3IUL3.New( reader, Threshold=10 )

        is (most of the time) equivalent to:

          obj = itkScalarConnectedComponentImageFilterIUL3IUL3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkScalarConnectedComponentImageFilterIUL3IUL3.__New_orig__()
        # itkTemplate is imported lazily here in the generated code.
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Attach the C-extension implementations as bound instance methods of the
# IUL3 proxy class, then register the proxy with the SWIG runtime.
itkScalarConnectedComponentImageFilterIUL3IUL3.SetDistanceThreshold = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_SetDistanceThreshold,None,itkScalarConnectedComponentImageFilterIUL3IUL3)
itkScalarConnectedComponentImageFilterIUL3IUL3.GetDistanceThreshold = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_GetDistanceThreshold,None,itkScalarConnectedComponentImageFilterIUL3IUL3)
itkScalarConnectedComponentImageFilterIUL3IUL3.GetPointer = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_GetPointer,None,itkScalarConnectedComponentImageFilterIUL3IUL3)
itkScalarConnectedComponentImageFilterIUL3IUL3_swigregister = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_swigregister
itkScalarConnectedComponentImageFilterIUL3IUL3_swigregister(itkScalarConnectedComponentImageFilterIUL3IUL3)
def itkScalarConnectedComponentImageFilterIUL3IUL3___New_orig__():
    """itkScalarConnectedComponentImageFilterIUL3IUL3___New_orig__()

    Module-level forwarder to the C-extension constructor (SWIG-generated).
    """
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3___New_orig__()
def itkScalarConnectedComponentImageFilterIUL3IUL3_cast(*args):
    """itkScalarConnectedComponentImageFilterIUL3IUL3_cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUL3IUL3

    Module-level forwarder to the C-extension downcast helper (SWIG-generated).
    """
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUL3IUL3_cast(*args)
class itkScalarConnectedComponentImageFilterIUS2IUS2(itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass):
    """Proxy of C++ itkScalarConnectedComponentImageFilterIUS2IUS2 class"""
    # SWIG-generated proxy: every method below forwards to the C extension
    # module _itkScalarConnectedComponentImageFilterPython.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # Direct construction is disabled; instances must be created via New().
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # ITK concept-checking constants exported by the wrapped C++ filter.
    InputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_InputEqualityComparableCheck
    OutputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_OutputEqualityComparableCheck
    MaskEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_MaskEqualityComparableCheck
    OutputIncrementDecrementOperatorsCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_OutputIncrementDecrementOperatorsCheck
    def __New_orig__():
        """__New_orig__()"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def SetDistanceThreshold(self, *args):
        """SetDistanceThreshold(self, unsigned short thresh)"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_SetDistanceThreshold(self, *args)
    def GetDistanceThreshold(self):
        """GetDistanceThreshold(self) -> unsigned short"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_GetDistanceThreshold(self)
    # Destructor hook used by the SWIG runtime when the proxy is collected.
    __swig_destroy__ = _itkScalarConnectedComponentImageFilterPython.delete_itkScalarConnectedComponentImageFilterIUS2IUS2
    def cast(*args):
        """cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUS2IUS2"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkScalarConnectedComponentImageFilterIUS2IUS2"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_GetPointer(self)
    # SWIG-generated factory: forwards argument wiring to itkTemplate.New.
    def New(*args, **kargs):
        """New() -> itkScalarConnectedComponentImageFilterIUS2IUS2

        Create a new object of the class itkScalarConnectedComponentImageFilterIUS2IUS2 and set the input and the parameters if some
        named or non-named arguments are passed to that method.

        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.

        The named parameters are used by calling the method with the same name prefixed by 'Set'.

        Ex:

          itkScalarConnectedComponentImageFilterIUS2IUS2.New( reader, Threshold=10 )

        is (most of the time) equivalent to:

          obj = itkScalarConnectedComponentImageFilterIUS2IUS2.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkScalarConnectedComponentImageFilterIUS2IUS2.__New_orig__()
        # itkTemplate is imported lazily here in the generated code.
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Attach the C-extension implementations as bound instance methods of the
# IUS2 proxy class, then register the proxy with the SWIG runtime.
itkScalarConnectedComponentImageFilterIUS2IUS2.SetDistanceThreshold = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_SetDistanceThreshold,None,itkScalarConnectedComponentImageFilterIUS2IUS2)
itkScalarConnectedComponentImageFilterIUS2IUS2.GetDistanceThreshold = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_GetDistanceThreshold,None,itkScalarConnectedComponentImageFilterIUS2IUS2)
itkScalarConnectedComponentImageFilterIUS2IUS2.GetPointer = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_GetPointer,None,itkScalarConnectedComponentImageFilterIUS2IUS2)
itkScalarConnectedComponentImageFilterIUS2IUS2_swigregister = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_swigregister
itkScalarConnectedComponentImageFilterIUS2IUS2_swigregister(itkScalarConnectedComponentImageFilterIUS2IUS2)
def itkScalarConnectedComponentImageFilterIUS2IUS2___New_orig__():
    """itkScalarConnectedComponentImageFilterIUS2IUS2___New_orig__()

    Module-level forwarder to the C-extension constructor (SWIG-generated).
    """
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2___New_orig__()
def itkScalarConnectedComponentImageFilterIUS2IUS2_cast(*args):
    """itkScalarConnectedComponentImageFilterIUS2IUS2_cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUS2IUS2

    Module-level forwarder to the C-extension downcast helper (SWIG-generated).
    """
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS2IUS2_cast(*args)
class itkScalarConnectedComponentImageFilterIUS3IUS3(itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass):
    """Proxy of C++ itkScalarConnectedComponentImageFilterIUS3IUS3 class"""
    # SWIG-generated proxy: every method below forwards to the C extension
    # module _itkScalarConnectedComponentImageFilterPython.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    # Direct construction is disabled; instances must be created via New().
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    # ITK concept-checking constants exported by the wrapped C++ filter.
    InputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_InputEqualityComparableCheck
    OutputEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_OutputEqualityComparableCheck
    MaskEqualityComparableCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_MaskEqualityComparableCheck
    OutputIncrementDecrementOperatorsCheck = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_OutputIncrementDecrementOperatorsCheck
    def __New_orig__():
        """__New_orig__()"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3___New_orig__()
    __New_orig__ = staticmethod(__New_orig__)
    def SetDistanceThreshold(self, *args):
        """SetDistanceThreshold(self, unsigned short thresh)"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_SetDistanceThreshold(self, *args)
    def GetDistanceThreshold(self):
        """GetDistanceThreshold(self) -> unsigned short"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_GetDistanceThreshold(self)
    # Destructor hook used by the SWIG runtime when the proxy is collected.
    __swig_destroy__ = _itkScalarConnectedComponentImageFilterPython.delete_itkScalarConnectedComponentImageFilterIUS3IUS3
    def cast(*args):
        """cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUS3IUS3"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_cast(*args)
    cast = staticmethod(cast)
    def GetPointer(self):
        """GetPointer(self) -> itkScalarConnectedComponentImageFilterIUS3IUS3"""
        return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_GetPointer(self)
    # SWIG-generated factory: forwards argument wiring to itkTemplate.New.
    def New(*args, **kargs):
        """New() -> itkScalarConnectedComponentImageFilterIUS3IUS3

        Create a new object of the class itkScalarConnectedComponentImageFilterIUS3IUS3 and set the input and the parameters if some
        named or non-named arguments are passed to that method.

        New() tries to assign all the non named parameters to the input of the new objects - the
        first non named parameter in the first input, etc.

        The named parameters are used by calling the method with the same name prefixed by 'Set'.

        Ex:

          itkScalarConnectedComponentImageFilterIUS3IUS3.New( reader, Threshold=10 )

        is (most of the time) equivalent to:

          obj = itkScalarConnectedComponentImageFilterIUS3IUS3.New()
          obj.SetInput( 0, reader.GetOutput() )
          obj.SetThreshold( 10 )
        """
        obj = itkScalarConnectedComponentImageFilterIUS3IUS3.__New_orig__()
        # itkTemplate is imported lazily here in the generated code.
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj
    New = staticmethod(New)
# Attach the C-extension implementations as bound instance methods of the
# IUS3 proxy class, then register the proxy with the SWIG runtime.
itkScalarConnectedComponentImageFilterIUS3IUS3.SetDistanceThreshold = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_SetDistanceThreshold,None,itkScalarConnectedComponentImageFilterIUS3IUS3)
itkScalarConnectedComponentImageFilterIUS3IUS3.GetDistanceThreshold = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_GetDistanceThreshold,None,itkScalarConnectedComponentImageFilterIUS3IUS3)
itkScalarConnectedComponentImageFilterIUS3IUS3.GetPointer = new_instancemethod(_itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_GetPointer,None,itkScalarConnectedComponentImageFilterIUS3IUS3)
itkScalarConnectedComponentImageFilterIUS3IUS3_swigregister = _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_swigregister
itkScalarConnectedComponentImageFilterIUS3IUS3_swigregister(itkScalarConnectedComponentImageFilterIUS3IUS3)
def itkScalarConnectedComponentImageFilterIUS3IUS3___New_orig__():
    """itkScalarConnectedComponentImageFilterIUS3IUS3___New_orig__()

    Module-level forwarder to the C-extension constructor (SWIG-generated).
    """
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3___New_orig__()
def itkScalarConnectedComponentImageFilterIUS3IUS3_cast(*args):
    """itkScalarConnectedComponentImageFilterIUS3IUS3_cast(itkLightObject obj) -> itkScalarConnectedComponentImageFilterIUS3IUS3

    Module-level forwarder to the C-extension downcast helper (SWIG-generated).
    """
    return _itkScalarConnectedComponentImageFilterPython.itkScalarConnectedComponentImageFilterIUS3IUS3_cast(*args)
| [
"fede.anne95@hotmail.it"
] | fede.anne95@hotmail.it |
ff86504950716ef63ae817247b33c8cfba3d5316 | 92bfcfaedb69b0d5c032f6a9b3ad70c0e06f7c53 | /ex12.py | 9622ce88c3c10d6bed96f9a1bb3fcd9bd5f1c55e | [] | no_license | hellstrikes13/sudipython | cf529ed60cca09afa185f3b56fe7ce17059212b0 | a789fc33cdab12d64ab674cb1c2ad7849617251f | refs/heads/master | 2022-01-03T09:12:14.192724 | 2021-12-23T05:38:31 | 2021-12-23T05:38:31 | 184,371,582 | 2 | 1 | null | 2021-12-23T05:38:31 | 2019-05-01T05:07:18 | Python | UTF-8 | Python | false | false | 139 | py | age = raw_input("age: ")
# NOTE(review): Python 2 syntax (raw_input and the print statement); this
# script will not run under Python 3. `age` is assigned earlier in the script.
tall = raw_input("height: ")
wt = raw_input("weight: ")
print 'so u\'r %r old , %r tall and %r kg' %(age,tall,wt)
| [
"hellstrikes13@gmail.com"
] | hellstrikes13@gmail.com |
303ba6a7930d88999d35935f4a43741846fc8d9c | 57f5cad2409ee6c8af646a0f95b37431bddeb946 | /learning-python-application-development/chapter-05/wargame/attackoftheorcs.py | f76697ec7c0a3cd66fa2bd999a395c27d58b9021 | [] | no_license | johannesgiorgis/python-learning | ebb7991c83afd4f066cd487715c1f82d69d81ef9 | fb1d13e839d3f1a2ed806ae943853984cda17354 | refs/heads/master | 2022-09-30T12:21:11.517259 | 2020-06-07T06:54:13 | 2020-06-07T06:54:13 | 239,886,855 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,732 | py | """
Attack of the Orcs v2.0.0
"""
import random
from hut import Hut
from knight import Knight
from orcrider import OrcRider
from gameutils import print_bold
class AttackOfTheOrcs:
    """Main class to play Attack of The Orcs game

    :ivar huts: List object to hold instances of `Hut` class.
    :ivar player: Represents the player playing this game. This is an
        instance of class `Knight` in current implementation.
    :ivar num_huts: Total number of huts in the village; used everywhere
        instead of a hard-coded count.

    .. seealso:: :py:meth:`self.play` where the main action happens.
    """
    def __init__(self):
        self.huts = []
        self.player = None
        self.num_huts = 5  # use this to avoid hard coding

    def get_occupants(self) -> list:
        """Return a list of occupant types for all huts.

        This is mainly used for printing information on current status of
        the hut (whether unoccupied or acquired etc).

        If the occupant is not `None` the occupant type will be 'enemy' or
        'friend'. But if there is no occupant or it is already 'acquired'
        that information is reported instead. See `Hut.get_occupant_type()`
        for more details.

        :return: A list containing occupant types (strings)

        .. seealso:: :py:meth:`Hut.get_occupant_type`
        """
        # Safe for an empty self.huts: simply returns [].
        return [x.get_occupant_type() for x in self.huts]

    def show_game_mission(self):
        """Print the game mission in the console"""
        print_bold("Mission:")
        print(" 1. Fight with the enemy.")
        print(" 2. Bring all the huts in the village under your control")
        print("---------------------------------------------------------\n")

    def _process_user_choice(self) -> int:
        """Process the user input for choice of hut to enter

        Returns the hut number to enter based on the user input. This method
        makes sure the entered value is numeric, lies within the valid range
        1..num_huts, and refers to a hut that has not already been acquired.
        Otherwise the user is prompted to re-enter the information.

        :return: hut index to enter (1-based).
        """
        print(f"Current occupants: {self.get_occupants()}")
        while True:
            user_choice = input(
                f"Choose a hut number to enter (1-{self.num_huts}): ")
            # Fix: the previous implementation called int() unconditionally
            # and indexed self.huts directly, so non-numeric input raised
            # ValueError and out-of-range numbers raised IndexError (and
            # '0' silently selected the last hut via huts[-1]).
            if not user_choice.strip().isdigit():
                print("Invalid input. Please enter a number.")
                continue
            idx = int(user_choice)
            if not 1 <= idx <= self.num_huts:
                print(f"Invalid hut number. Please choose between 1 and "
                      f"{self.num_huts}.")
                continue
            if self.huts[idx - 1].is_acquired:
                print(
                    "You have already acquired this hut. Try again."
                    "<INFO: You can NOT get healed in already acquired hut.>"
                )
            else:
                return idx

    def _occupy_huts(self):
        """Randomly occupy the huts with one of: friend, enemy or 'None'

        One hut is created per self.num_huts; each is occupied by an
        `OrcRider` (enemy), a `Knight` (friend), or left empty.
        """
        for i in range(self.num_huts):
            choice_list = ["enemy", "friend", None]
            computer_choice = random.choice(choice_list)
            if computer_choice == "enemy":
                name = "enemy-" + str(i + 1)
                self.huts.append(Hut(i + 1, OrcRider(name)))
            elif computer_choice == "friend":
                name = "knight-" + str(i + 1)
                self.huts.append(Hut(i + 1, Knight(name)))
            else:
                # computer_choice is None here: the hut stays unoccupied.
                self.huts.append(Hut(i + 1, computer_choice))

    def setup_game_scenario(self):
        """Create player and huts and then randomly pre-occupy huts...

        The huts might be left empty as well. This method also prints the
        game mission and the player's initial health.

        .. seealso:: :py:meth:`self.play`,
                     :py:meth:`self._occupy_huts`
        """
        self.player = Knight()
        self._occupy_huts()
        self.show_game_mission()
        self.player.show_health(bold=True)

    def play(self):
        """Workhorse method to play the game.

        Controls the high level logic to play the game. This is called from
        the main program to begin the game execution. In summary it:

        * Sets up instance variables for the game
        * Accepts the user input for hut number to enter
        * Attempts to acquire the hut ( :py:meth:`Knight.acquire_hut` )
        * Determines if the player wins or loses.

        .. seealso:: :py:meth:`setup_game_scenario`,
                     :py:meth:`Knight.acquire_hut`
        """
        # Create a Knight instance, create huts and preoccupy them with
        # a game character instance (or leave empty).
        self.setup_game_scenario()
        # Initial setup is done, now the main play loop.
        acquired_hut_counter = 0
        while acquired_hut_counter < self.num_huts:
            idx = self._process_user_choice()
            self.player.acquire_hut(self.huts[idx - 1])
            if self.player.health_meter <= 0:
                print_bold("YOU LOSE :( Better luck next time")
                break
            if self.huts[idx - 1].is_acquired:
                acquired_hut_counter += 1
            if acquired_hut_counter == self.num_huts:
                print_bold("Congratulations! YOU WIN!!!")
if __name__ == "__main__":
    # Script entry point: create the game controller and start the
    # interactive play loop (blocks on console input).
    print("Starting game...")
    game = AttackOfTheOrcs()
    game.play()
| [
"johannesgiorgis@users.noreply.github.com"
] | johannesgiorgis@users.noreply.github.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.