# ===== mAP/main.py | repo: juwangvsu/yolov3.keras | commit 4816fd25f30a420e52877657929aa1505e9fa8c3 | license: MIT =====

import glob
import json
import os
import shutil
import operator
import sys
import argparse
import math
import numpy as np
import matplotlib.pyplot as plt
MINOVERLAP = 0.5 # default value (defined in the PASCAL VOC2012 challenge)
parser = argparse.ArgumentParser()
parser.add_argument('-na', '--no-animation', help="no animation is shown.", action="store_true")
parser.add_argument('-np', '--no-plot', help="no plot is shown.", action="store_true")
parser.add_argument('-q', '--quiet', help="minimalistic console output.", action="store_true")
# argparse receiving list of classes to be ignored
parser.add_argument('-i', '--ignore', nargs='+', type=str, help="ignore a list of classes.")
# argparse receiving list of classes with specific IoU (e.g., python main.py --set-class-iou person 0.7)
parser.add_argument('--set-class-iou', nargs='+', type=str, help="set IoU for a specific class.")
args = parser.parse_args()
'''
    0,0 ------> x (width)
     |
     |  (Left,Top)
     |      *_________
     |      |         |
     |      |         |
     y      |_________|
  (height)            *
                (Right,Bottom)
'''
# if there are no classes to ignore then replace None by empty list
if args.ignore is None:
    args.ignore = []
specific_iou_flagged = False
if args.set_class_iou is not None:
    specific_iou_flagged = True
# make sure that the cwd() is the location of the python script (so that every path makes sense)
os.chdir(os.path.dirname(os.path.abspath(__file__)))
GT_PATH = os.path.join(os.getcwd(), 'input', 'ground-truth')
DR_PATH = os.path.join(os.getcwd(), 'input', 'detection-results')
# if there are no images then no animation can be shown
IMG_PATH = os.path.join(os.getcwd(), 'input', 'images-optional')
if os.path.exists(IMG_PATH):
    for dirpath, dirnames, files in os.walk(IMG_PATH):
        if not files:
            # no image files found
            args.no_animation = True
else:
    args.no_animation = True
# try to import OpenCV if the user didn't choose the option --no-animation
show_animation = False
if not args.no_animation:
    try:
        import cv2
        show_animation = True
    except ImportError:
        print("\"opencv-python\" not found, please install it to visualize the results.")
        args.no_animation = True
# try to import Matplotlib if the user didn't choose the option --no-plot
draw_plot = False
if not args.no_plot:
    try:
        import matplotlib.pyplot as plt
        draw_plot = True
    except ImportError:
        print("\"matplotlib\" not found, please install it to get the resulting plots.")
        args.no_plot = True
def log_average_miss_rate(precision, fp_cumsum, num_images):
    """
    log-average miss rate:
        Calculated by averaging miss rates at 9 evenly spaced FPPI points
        between 1e-2 and 1e0, in log-space.

    output:
        lamr | log-average miss rate
        mr | miss rate
        fppi | false positives per image

    references:
        [1] Dollar, Piotr, et al. "Pedestrian Detection: An Evaluation of the
            State of the Art." Pattern Analysis and Machine Intelligence, IEEE
            Transactions on 34.4 (2012): 743 - 761.
    """
    # if there were no detections of that class
    if precision.size == 0:
        lamr = 0
        mr = 1
        fppi = 0
        return lamr, mr, fppi

    fppi = fp_cumsum / float(num_images)
    mr = (1 - precision)

    fppi_tmp = np.insert(fppi, 0, -1.0)
    mr_tmp = np.insert(mr, 0, 1.0)

    # Use 9 evenly spaced reference points in log-space
    ref = np.logspace(-2.0, 0.0, num=9)
    for i, ref_i in enumerate(ref):
        # np.where() will always find at least 1 index, since min(ref) = 0.01 and min(fppi_tmp) = -1.0
        j = np.where(fppi_tmp <= ref_i)[0][-1]
        ref[i] = mr_tmp[j]

    # log(0) is undefined, so we use np.maximum(1e-10, ref)
    lamr = math.exp(np.mean(np.log(np.maximum(1e-10, ref))))

    return lamr, mr, fppi
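
# A minimal usage sketch for log_average_miss_rate (illustrative only, with
# made-up numbers; it is not executed by this script). For two detections over
# 4 images with precision [1.0, 0.5] and cumulative false positives [0, 1]:
#
#   lamr, mr, fppi = log_average_miss_rate(np.array([1.0, 0.5]), np.array([0, 1]), 4)
#
# Here fppi = [0.0, 0.25] and mr = [0.0, 0.5]; each of the 9 log-spaced
# reference points between 1e-2 and 1e0 picks the miss rate at the largest
# FPPI not exceeding it, and lamr is the geometric mean of those values
# (clamped at 1e-10 to avoid log(0)).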
"""
throw error and exit
"""
def error(msg):
print(msg)
sys.exit(0)
"""
check if the number is a float between 0.0 and 1.0
"""
def is_float_between_0_and_1(value):
try:
val = float(value)
if val > 0.0 and val < 1.0:
return True
else:
return False
except ValueError:
return False
"""
Calculate the AP given the recall and precision array
1st) We compute a version of the measured precision/recall curve with
precision monotonically decreasing
2nd) We compute the AP as the area under this curve by numerical integration.
"""
def voc_ap(rec, prec):
"""
--- Official matlab code VOC2012---
mrec=[0 ; rec ; 1];
mpre=[0 ; prec ; 0];
for i=numel(mpre)-1:-1:1
mpre(i)=max(mpre(i),mpre(i+1));
end
i=find(mrec(2:end)~=mrec(1:end-1))+1;
ap=sum((mrec(i)-mrec(i-1)).*mpre(i));
"""
rec.insert(0, 0.0) # insert 0.0 at begining of list
rec.append(1.0) # insert 1.0 at end of list
mrec = rec[:]
prec.insert(0, 0.0) # insert 0.0 at begining of list
prec.append(0.0) # insert 0.0 at end of list
mpre = prec[:]
"""
This part makes the precision monotonically decreasing
(goes from the end to the beginning)
matlab: for i=numel(mpre)-1:-1:1
mpre(i)=max(mpre(i),mpre(i+1));
"""
# matlab indexes start in 1 but python in 0, so I have to do:
# range(start=(len(mpre) - 2), end=0, step=-1)
# also the python function range excludes the end, resulting in:
# range(start=(len(mpre) - 2), end=-1, step=-1)
for i in range(len(mpre)-2, -1, -1):
mpre[i] = max(mpre[i], mpre[i+1])
"""
This part creates a list of indexes where the recall changes
matlab: i=find(mrec(2:end)~=mrec(1:end-1))+1;
"""
i_list = []
for i in range(1, len(mrec)):
if mrec[i] != mrec[i-1]:
i_list.append(i) # if it was matlab would be i + 1
"""
The Average Precision (AP) is the area under the curve
(numerical integration)
matlab: ap=sum((mrec(i)-mrec(i-1)).*mpre(i));
"""
ap = 0.0
for i in i_list:
ap += ((mrec[i]-mrec[i-1])*mpre[i])
return ap, mrec, mpre
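
# A worked example of voc_ap (illustrative only, not executed by this script):
#
#   ap, mrec, mpre = voc_ap([0.5, 1.0], [1.0, 0.5])
#
# Step 1 pads the lists to mrec = [0.0, 0.5, 1.0, 1.0] and mpre = [0.0, 1.0, 0.5, 0.0],
# then makes precision monotonically decreasing: mpre = [1.0, 1.0, 0.5, 0.0].
# Step 2 sums the rectangles at the indexes where recall changes (i = 1 and i = 2):
#   ap = (0.5 - 0.0)*1.0 + (1.0 - 0.5)*0.5 = 0.75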
"""
Convert the lines of a file to a list
"""
def file_lines_to_list(path):
# open txt file lines to a list
with open(path) as f:
content = f.readlines()
# remove whitespace characters like `\n` at the end of each line
content = [x.strip() for x in content]
return content
"""
Draws text in image
"""
def draw_text_in_image(img, text, pos, color, line_width):
font = cv2.FONT_HERSHEY_PLAIN
fontScale = 1
lineType = 1
bottomLeftCornerOfText = pos
cv2.putText(img, text,
bottomLeftCornerOfText,
font,
fontScale,
color,
lineType)
text_width, _ = cv2.getTextSize(text, font, fontScale, lineType)[0]
return img, (line_width + text_width)
"""
Plot - adjust axes
"""
def adjust_axes(r, t, fig, axes):
# get text width for re-scaling
bb = t.get_window_extent(renderer=r)
text_width_inches = bb.width / fig.dpi
# get axis width in inches
current_fig_width = fig.get_figwidth()
new_fig_width = current_fig_width + text_width_inches
propotion = new_fig_width / current_fig_width
# get axis limit
x_lim = axes.get_xlim()
axes.set_xlim([x_lim[0], x_lim[1]*propotion])
"""
Draw plot using Matplotlib
"""
def draw_plot_func(dictionary, n_classes, window_title, plot_title, x_label, output_path, to_show, plot_color, true_p_bar):
# sort the dictionary by decreasing value, into a list of tuples
sorted_dic_by_value = sorted(dictionary.items(), key=operator.itemgetter(1))
# unpacking the list of tuples into two lists
sorted_keys, sorted_values = zip(*sorted_dic_by_value)
#
if true_p_bar != "":
"""
Special case to draw in:
- green -> TP: True Positives (object detected and matches ground-truth)
- red -> FP: False Positives (object detected but does not match ground-truth)
- orange -> FN: False Negatives (object not detected but present in the ground-truth)
"""
fp_sorted = []
tp_sorted = []
for key in sorted_keys:
fp_sorted.append(dictionary[key] - true_p_bar[key])
tp_sorted.append(true_p_bar[key])
plt.barh(range(n_classes), fp_sorted, align='center', color='crimson', label='False Positive')
plt.barh(range(n_classes), tp_sorted, align='center', color='forestgreen', label='True Positive', left=fp_sorted)
# add legend
plt.legend(loc='lower right')
"""
Write number on side of bar
"""
fig = plt.gcf() # gcf - get current figure
axes = plt.gca()
r = fig.canvas.get_renderer()
for i, val in enumerate(sorted_values):
fp_val = fp_sorted[i]
tp_val = tp_sorted[i]
fp_str_val = " " + str(fp_val)
tp_str_val = fp_str_val + " " + str(tp_val)
# trick to paint multicolor with offset:
# first paint everything and then repaint the first number
t = plt.text(val, i, tp_str_val, color='forestgreen', va='center', fontweight='bold')
plt.text(val, i, fp_str_val, color='crimson', va='center', fontweight='bold')
if i == (len(sorted_values)-1): # largest bar
adjust_axes(r, t, fig, axes)
else:
plt.barh(range(n_classes), sorted_values, color=plot_color)
"""
Write number on side of bar
"""
fig = plt.gcf() # gcf - get current figure
axes = plt.gca()
r = fig.canvas.get_renderer()
for i, val in enumerate(sorted_values):
str_val = " " + str(val) # add a space before
if val < 1.0:
str_val = " {0:.2f}".format(val)
t = plt.text(val, i, str_val, color=plot_color, va='center', fontweight='bold')
# re-set axes to show number inside the figure
if i == (len(sorted_values)-1): # largest bar
adjust_axes(r, t, fig, axes)
# set window title
fig.canvas.set_window_title(window_title)
# write classes in y axis
tick_font_size = 12
plt.yticks(range(n_classes), sorted_keys, fontsize=tick_font_size)
"""
Re-scale height accordingly
"""
init_height = fig.get_figheight()
# comput the matrix height in points and inches
dpi = fig.dpi
height_pt = n_classes * (tick_font_size * 1.4) # 1.4 (some spacing)
height_in = height_pt / dpi
# compute the required figure height
top_margin = 0.15 # in percentage of the figure height
bottom_margin = 0.05 # in percentage of the figure height
figure_height = height_in / (1 - top_margin - bottom_margin)
# set new height
if figure_height > init_height:
fig.set_figheight(figure_height)
# set plot title
plt.title(plot_title, fontsize=14)
# set axis titles
# plt.xlabel('classes')
plt.xlabel(x_label, fontsize='large')
# adjust size of window
fig.tight_layout()
# save the plot
fig.savefig(output_path)
# show image
if to_show:
plt.show()
# close the plot
plt.close()
"""
Create a ".temp_files/" and "results/" directory
"""
TEMP_FILES_PATH = ".temp_files"
if not os.path.exists(TEMP_FILES_PATH): # if it doesn't exist already
os.makedirs(TEMP_FILES_PATH)
results_files_path = "results"
if os.path.exists(results_files_path): # if it exist already
# reset the results directory
shutil.rmtree(results_files_path)
os.makedirs(results_files_path)
if draw_plot:
os.makedirs(os.path.join(results_files_path, "classes"))
if show_animation:
os.makedirs(os.path.join(results_files_path, "images", "detections_one_by_one"))
"""
ground-truth
Load each of the ground-truth files into a temporary ".json" file.
Create a list of all the class names present in the ground-truth (gt_classes).
"""
# get a list with the ground-truth files
ground_truth_files_list = glob.glob(GT_PATH + '/*.txt')
if len(ground_truth_files_list) == 0:
    error("Error: No ground-truth files found!")
ground_truth_files_list.sort()
# dictionary with counter per class
gt_counter_per_class = {}
counter_images_per_class = {}

for txt_file in ground_truth_files_list:
    #print(txt_file)
    file_id = txt_file.split(".txt", 1)[0]
    file_id = os.path.basename(os.path.normpath(file_id))
    # check if there is a correspondent detection-results file
    temp_path = os.path.join(DR_PATH, (file_id + ".txt"))
    if not os.path.exists(temp_path):
        error_msg = "Error. File not found: {}\n".format(temp_path)
        error_msg += "(You can avoid this error message by running extra/intersect-gt-and-dr.py)"
        error(error_msg)
    lines_list = file_lines_to_list(txt_file)
    # create ground-truth dictionary
    bounding_boxes = []
    is_difficult = False
    already_seen_classes = []
    for line in lines_list:
        try:
            if "difficult" in line:
                class_name, left, top, right, bottom, _difficult = line.split()
                is_difficult = True
            else:
                class_name, left, top, right, bottom = line.split()
        except ValueError:
            error_msg = "Error: File " + txt_file + " in the wrong format.\n"
            error_msg += " Expected: <class_name> <left> <top> <right> <bottom> ['difficult']\n"
            error_msg += " Received: " + line
            error_msg += "\n\nIf you have a <class_name> with spaces between words you should remove them\n"
            error_msg += "by running the script \"remove_space.py\" or \"rename_class.py\" in the \"extra/\" folder."
            error(error_msg)
        # check if class is in the ignore list, if yes skip
        if class_name in args.ignore:
            continue
        bbox = left + " " + top + " " + right + " " + bottom
        if is_difficult:
            bounding_boxes.append({"class_name":class_name, "bbox":bbox, "used":False, "difficult":True})
            is_difficult = False
        else:
            bounding_boxes.append({"class_name":class_name, "bbox":bbox, "used":False})
            # count that object
            if class_name in gt_counter_per_class:
                gt_counter_per_class[class_name] += 1
            else:
                # if class didn't exist yet
                gt_counter_per_class[class_name] = 1

            if class_name not in already_seen_classes:
                if class_name in counter_images_per_class:
                    counter_images_per_class[class_name] += 1
                else:
                    # if class didn't exist yet
                    counter_images_per_class[class_name] = 1
                already_seen_classes.append(class_name)

    # dump bounding_boxes into a ".json" file
    with open(TEMP_FILES_PATH + "/" + file_id + "_ground_truth.json", 'w') as outfile:
        json.dump(bounding_boxes, outfile)

gt_classes = list(gt_counter_per_class.keys())
# let's sort the classes alphabetically
gt_classes = sorted(gt_classes)
n_classes = len(gt_classes)
#print(gt_classes)
#print(gt_counter_per_class)
"""
Check format of the flag --set-class-iou (if used)
e.g. check if class exists
"""
if specific_iou_flagged:
n_args = len(args.set_class_iou)
error_msg = \
'\n --set-class-iou [class_1] [IoU_1] [class_2] [IoU_2] [...]'
if n_args % 2 != 0:
error('Error, missing arguments. Flag usage:' + error_msg)
# [class_1] [IoU_1] [class_2] [IoU_2]
# specific_iou_classes = ['class_1', 'class_2']
specific_iou_classes = args.set_class_iou[::2] # even
# iou_list = ['IoU_1', 'IoU_2']
iou_list = args.set_class_iou[1::2] # odd
if len(specific_iou_classes) != len(iou_list):
error('Error, missing arguments. Flag usage:' + error_msg)
for tmp_class in specific_iou_classes:
if tmp_class not in gt_classes:
error('Error, unknown class \"' + tmp_class + '\". Flag usage:' + error_msg)
for num in iou_list:
if not is_float_between_0_and_1(num):
error('Error, IoU must be between 0.0 and 1.0. Flag usage:' + error_msg)
"""
detection-results
Load each of the detection-results files into a temporary ".json" file.
"""
# get a list with the detection-results files
dr_files_list = glob.glob(DR_PATH + '/*.txt')
dr_files_list.sort()

for class_index, class_name in enumerate(gt_classes):
    bounding_boxes = []
    for txt_file in dr_files_list:
        #print(txt_file)
        # the first time it checks if all the corresponding ground-truth files exist
        file_id = txt_file.split(".txt", 1)[0]
        file_id = os.path.basename(os.path.normpath(file_id))
        temp_path = os.path.join(GT_PATH, (file_id + ".txt"))
        if class_index == 0:
            if not os.path.exists(temp_path):
                error_msg = "Error. File not found: {}\n".format(temp_path)
                error_msg += "(You can avoid this error message by running extra/intersect-gt-and-dr.py)"
                error(error_msg)
        lines = file_lines_to_list(txt_file)
        if lines[-1] == '':
            lines.remove(lines[-1])
        for line in lines:
            try:
                tmp_class_name, confidence, left, top, right, bottom = line.split()
            except ValueError:
                error_msg = "Error: File " + txt_file + " in the wrong format.\n"
                error_msg += " Expected: <class_name> <confidence> <left> <top> <right> <bottom>\n"
                error_msg += " Received: " + line
                error(error_msg)
            if tmp_class_name == class_name:
                #print("match")
                bbox = left + " " + top + " " + right + " " + bottom
                bounding_boxes.append({"confidence":confidence, "file_id":file_id, "bbox":bbox})
                #print(bounding_boxes)
    # sort detection-results by decreasing confidence
    bounding_boxes.sort(key=lambda x:float(x['confidence']), reverse=True)
    with open(TEMP_FILES_PATH + "/" + class_name + "_dr.json", 'w') as outfile:
        json.dump(bounding_boxes, outfile)
"""
Calculate the AP for each class
"""
sum_AP = 0.0
ap_dictionary = {}
lamr_dictionary = {}
# open file to store the results
with open(results_files_path + "/results.txt", 'w') as results_file:
results_file.write("# AP and precision/recall per class\n")
count_true_positives = {}
for class_index, class_name in enumerate(gt_classes):
count_true_positives[class_name] = 0
"""
Load detection-results of that class
"""
dr_file = TEMP_FILES_PATH + "/" + class_name + "_dr.json"
dr_data = json.load(open(dr_file))
"""
Assign detection-results to ground-truth objects
"""
nd = len(dr_data)
tp = [0] * nd # creates an array of zeros of size nd
fp = [0] * nd
for idx, detection in enumerate(dr_data):
file_id = detection["file_id"]
if show_animation:
# find ground truth image
ground_truth_img = glob.glob1(IMG_PATH, file_id + ".*")
#tifCounter = len(glob.glob1(myPath,"*.tif"))
if len(ground_truth_img) == 0:
error("Error. Image not found with id: " + file_id)
elif len(ground_truth_img) > 1:
error("Error. Multiple image with id: " + file_id)
else: # found image
#print(IMG_PATH + "/" + ground_truth_img[0])
# Load image
img = cv2.imread(IMG_PATH + "/" + ground_truth_img[0])
# load image with draws of multiple detections
img_cumulative_path = results_files_path + "/images/" + ground_truth_img[0]
if os.path.isfile(img_cumulative_path):
img_cumulative = cv2.imread(img_cumulative_path)
else:
img_cumulative = img.copy()
# Add bottom border to image
bottom_border = 60
BLACK = [0, 0, 0]
img = cv2.copyMakeBorder(img, 0, bottom_border, 0, 0, cv2.BORDER_CONSTANT, value=BLACK)
# assign detection-results to ground truth object if any
# open ground-truth with that file_id
gt_file = TEMP_FILES_PATH + "/" + file_id + "_ground_truth.json"
ground_truth_data = json.load(open(gt_file))
ovmax = -1
gt_match = -1
# load detected object bounding-box
bb = [ float(x) for x in detection["bbox"].split() ]
for obj in ground_truth_data:
# look for a class_name match
if obj["class_name"] == class_name:
bbgt = [ float(x) for x in obj["bbox"].split() ]
bi = [max(bb[0],bbgt[0]), max(bb[1],bbgt[1]), min(bb[2],bbgt[2]), min(bb[3],bbgt[3])]
iw = bi[2] - bi[0] + 1
ih = bi[3] - bi[1] + 1
if iw > 0 and ih > 0:
# compute overlap (IoU) = area of intersection / area of union
ua = (bb[2] - bb[0] + 1) * (bb[3] - bb[1] + 1) + (bbgt[2] - bbgt[0]
+ 1) * (bbgt[3] - bbgt[1] + 1) - iw * ih
ov = iw * ih / ua
if ov > ovmax:
ovmax = ov
gt_match = obj
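
            # Worked IoU example under this +1 pixel convention (illustrative
            # only): for bb = [0, 0, 9, 9] and bbgt = [5, 0, 14, 9] the
            # intersection is iw = 5, ih = 10 (area 50), each box has area 100,
            # so ua = 100 + 100 - 50 = 150 and ov = 50/150 ~= 0.33.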
            # assign detection as true positive/don't care/false positive
            if show_animation:
                status = "NO MATCH FOUND!" # status is only used in the animation
            # set minimum overlap
            min_overlap = MINOVERLAP
            if specific_iou_flagged:
                if class_name in specific_iou_classes:
                    index = specific_iou_classes.index(class_name)
                    min_overlap = float(iou_list[index])
            if ovmax >= min_overlap:
                if "difficult" not in gt_match:
                    if not bool(gt_match["used"]):
                        # true positive
                        tp[idx] = 1
                        gt_match["used"] = True
                        count_true_positives[class_name] += 1
                        # update the ".json" file
                        with open(gt_file, 'w') as f:
                            f.write(json.dumps(ground_truth_data))
                        if show_animation:
                            status = "MATCH!"
                    else:
                        # false positive (multiple detection)
                        fp[idx] = 1
                        if show_animation:
                            status = "REPEATED MATCH!"
            else:
                # false positive
                fp[idx] = 1
                if ovmax > 0:
                    status = "INSUFFICIENT OVERLAP"

            """
             Draw image to show animation
            """
            if show_animation:
                height, width = img.shape[:2]
                # colors (OpenCV works with BGR)
                white = (255,255,255)
                light_blue = (255,200,100)
                green = (0,255,0)
                light_red = (30,30,255)
                # 1st line
                margin = 10
                v_pos = int(height - margin - (bottom_border / 2.0))
                text = "Image: " + ground_truth_img[0] + " "
                img, line_width = draw_text_in_image(img, text, (margin, v_pos), white, 0)
                text = "Class [" + str(class_index) + "/" + str(n_classes) + "]: " + class_name + " "
                img, line_width = draw_text_in_image(img, text, (margin + line_width, v_pos), light_blue, line_width)
                if ovmax != -1:
                    color = light_red
                    if status == "INSUFFICIENT OVERLAP":
                        text = "IoU: {0:.2f}% ".format(ovmax*100) + "< {0:.2f}% ".format(min_overlap*100)
                    else:
                        text = "IoU: {0:.2f}% ".format(ovmax*100) + ">= {0:.2f}% ".format(min_overlap*100)
                        color = green
                    img, _ = draw_text_in_image(img, text, (margin + line_width, v_pos), color, line_width)
                # 2nd line
                v_pos += int(bottom_border / 2.0)
                rank_pos = str(idx+1) # rank position (idx starts at 0)
                text = "Detection #rank: " + rank_pos + " confidence: {0:.2f}% ".format(float(detection["confidence"])*100)
                img, line_width = draw_text_in_image(img, text, (margin, v_pos), white, 0)
                color = light_red
                if status == "MATCH!":
                    color = green
                text = "Result: " + status + " "
                img, line_width = draw_text_in_image(img, text, (margin + line_width, v_pos), color, line_width)

                font = cv2.FONT_HERSHEY_SIMPLEX
                if ovmax > 0: # if there is an intersection between the bounding-boxes
                    bbgt = [ int(round(float(x))) for x in gt_match["bbox"].split() ]
                    cv2.rectangle(img,(bbgt[0],bbgt[1]),(bbgt[2],bbgt[3]),light_blue,2)
                    cv2.rectangle(img_cumulative,(bbgt[0],bbgt[1]),(bbgt[2],bbgt[3]),light_blue,2)
                    cv2.putText(img_cumulative, class_name, (bbgt[0],bbgt[1] - 5), font, 0.6, light_blue, 1, cv2.LINE_AA)
                bb = [int(i) for i in bb]
                cv2.rectangle(img,(bb[0],bb[1]),(bb[2],bb[3]),color,2)
                cv2.rectangle(img_cumulative,(bb[0],bb[1]),(bb[2],bb[3]),color,2)
                cv2.putText(img_cumulative, class_name, (bb[0],bb[1] - 5), font, 0.6, color, 1, cv2.LINE_AA)
                # show image
                plt.imshow(img, cmap='gray', interpolation='bicubic')
                plt.show()
                #cv2.imshow("Animation", img)
                #cv2.waitKey(20) # show for 20 ms
                # save image to results
                output_img_path = results_files_path + "/images/detections_one_by_one/" + class_name + "_detection" + str(idx) + ".jpg"
                # save the image
                #cv2.imwrite(output_img_path, img)
                # save the image with all the objects drawn to it
                #cv2.imwrite(img_cumulative_path, img_cumulative)
        #print(tp)
        # compute precision/recall
        cumsum = 0
        for idx, val in enumerate(fp):
            fp[idx] += cumsum
            cumsum += val
        cumsum = 0
        for idx, val in enumerate(tp):
            tp[idx] += cumsum
            cumsum += val
        #print(tp)
        rec = tp[:]
        for idx, val in enumerate(tp):
            rec[idx] = float(tp[idx]) / gt_counter_per_class[class_name]
        #print(rec)
        prec = tp[:]
        for idx, val in enumerate(tp):
            prec[idx] = float(tp[idx]) / (fp[idx] + tp[idx])
        #print(prec)

        ap, mrec, mprec = voc_ap(rec[:], prec[:])
        sum_AP += ap
        text = "{0:.2f}%".format(ap*100) + " = " + class_name + " AP " #class_name + " AP = {0:.2f}%".format(ap*100)
        """
         Write to results.txt
        """
        rounded_prec = [ '%.2f' % elem for elem in prec ]
        rounded_rec = [ '%.2f' % elem for elem in rec ]
        results_file.write(text + "\n Precision: " + str(rounded_prec) + "\n Recall :" + str(rounded_rec) + "\n\n")
        if not args.quiet:
            print(text)
        ap_dictionary[class_name] = ap

        n_images = counter_images_per_class[class_name]
        lamr, mr, fppi = log_average_miss_rate(np.array(rec), np.array(fp), n_images)
        lamr_dictionary[class_name] = lamr

        """
         Draw plot
        """
        if draw_plot:
            plt.plot(rec, prec, '-o')
            # add a new penultimate point to the list (mrec[-2], 0.0)
            # since the last line segment (and respective area) do not affect the AP value
            area_under_curve_x = mrec[:-1] + [mrec[-2]] + [mrec[-1]]
            area_under_curve_y = mprec[:-1] + [0.0] + [mprec[-1]]
            plt.fill_between(area_under_curve_x, 0, area_under_curve_y, alpha=0.2, edgecolor='r')
            # set window title
            fig = plt.gcf() # gcf - get current figure
            fig.canvas.set_window_title('AP ' + class_name)
            # set plot title
            plt.title('class: ' + text)
            #plt.suptitle('This is a somewhat long figure title', fontsize=16)
            # set axis titles
            plt.xlabel('Recall')
            plt.ylabel('Precision')
            # optional - set axes
            axes = plt.gca() # gca - get current axes
            axes.set_xlim([0.0,1.0])
            axes.set_ylim([0.0,1.05]) # .05 to give some extra space
            # Alternative option -> wait for button to be pressed
            #while not plt.waitforbuttonpress(): pass # wait for key display
            # Alternative option -> normal display
            #plt.show()
            # save the plot
            fig.savefig(results_files_path + "/classes/" + class_name + ".png")
            plt.cla() # clear axes for next plot

    if show_animation:
        cv2.destroyAllWindows()

    results_file.write("\n# mAP of all classes\n")
    mAP = sum_AP / n_classes
    text = "mAP = {0:.2f}%".format(mAP*100)
    results_file.write(text + "\n")
    print(text)

# remove the temp_files directory
shutil.rmtree(TEMP_FILES_PATH)
"""
Count total of detection-results
"""
# iterate through all the files
det_counter_per_class = {}
for txt_file in dr_files_list:
# get lines to list
lines_list = file_lines_to_list(txt_file)
if lines_list[-1] == '':
lines_list.remove( lines_list[-1])
for line in lines_list:
class_name = line.split()[0]
# check if class is in the ignore list, if yes skip
if class_name in args.ignore:
continue
# count that object
if class_name in det_counter_per_class:
det_counter_per_class[class_name] += 1
else:
# if class didn't exist yet
det_counter_per_class[class_name] = 1
#print(det_counter_per_class)
dr_classes = list(det_counter_per_class.keys())
"""
Plot the total number of occurences of each class in the ground-truth
"""
if draw_plot:
window_title = "ground-truth-info"
plot_title = "ground-truth\n"
plot_title += "(" + str(len(ground_truth_files_list)) + " files and " + str(n_classes) + " classes)"
x_label = "Number of objects per class"
output_path = results_files_path + "/ground-truth-info.png"
to_show = False
plot_color = 'forestgreen'
draw_plot_func(
gt_counter_per_class,
n_classes,
window_title,
plot_title,
x_label,
output_path,
to_show,
plot_color,
'',
)
"""
Write number of ground-truth objects per class to results.txt
"""
with open(results_files_path + "/results.txt", 'a') as results_file:
results_file.write("\n# Number of ground-truth objects per class\n")
for class_name in sorted(gt_counter_per_class):
results_file.write(class_name + ": " + str(gt_counter_per_class[class_name]) + "\n")
"""
Finish counting true positives
"""
for class_name in dr_classes:
# if class exists in detection-result but not in ground-truth then there are no true positives in that class
if class_name not in gt_classes:
count_true_positives[class_name] = 0
#print(count_true_positives)
"""
Plot the total number of occurences of each class in the "detection-results" folder
"""
if draw_plot:
window_title = "detection-results-info"
# Plot title
plot_title = "detection-results\n"
plot_title += "(" + str(len(dr_files_list)) + " files and "
count_non_zero_values_in_dictionary = sum(int(x) > 0 for x in list(det_counter_per_class.values()))
plot_title += str(count_non_zero_values_in_dictionary) + " detected classes)"
# end Plot title
x_label = "Number of objects per class"
output_path = results_files_path + "/detection-results-info.png"
to_show = False
plot_color = 'forestgreen'
true_p_bar = count_true_positives
draw_plot_func(
det_counter_per_class,
len(det_counter_per_class),
window_title,
plot_title,
x_label,
output_path,
to_show,
plot_color,
true_p_bar
)
"""
Write number of detected objects per class to results.txt
"""
with open(results_files_path + "/results.txt", 'a') as results_file:
results_file.write("\n# Number of detected objects per class\n")
for class_name in sorted(dr_classes):
n_det = det_counter_per_class[class_name]
text = class_name + ": " + str(n_det)
text += " (tp:" + str(count_true_positives[class_name]) + ""
text += ", fp:" + str(n_det - count_true_positives[class_name]) + ")\n"
results_file.write(text)
"""
Draw log-average miss rate plot (Show lamr of all classes in decreasing order)
"""
if draw_plot:
window_title = "lamr"
plot_title = "log-average miss rate"
x_label = "log-average miss rate"
output_path = results_files_path + "/lamr.png"
to_show = False
plot_color = 'royalblue'
draw_plot_func(
lamr_dictionary,
n_classes,
window_title,
plot_title,
x_label,
output_path,
to_show,
plot_color,
""
)
"""
Draw mAP plot (Show AP's of all classes in decreasing order)
"""
if draw_plot:
window_title = "mAP"
plot_title = "mAP = {0:.2f}%".format(mAP*100)
x_label = "Average Precision"
output_path = results_files_path + "/mAP.png"
to_show = True
plot_color = 'royalblue'
draw_plot_func(
ap_dictionary,
n_classes,
window_title,
plot_title,
x_label,
output_path,
to_show,
plot_color,
""
)

# ===== tests/test_download.py | repo: TimurPlusPlus/nexus3-cli | commit 5c4dc4393ccdc07ad3769e8bb44f3fb20857ffa8 | license: MIT =====

import itertools
import os

import pytest
from faker import Faker


@pytest.mark.parametrize('flatten, remote, destination, x_local_path', [
    # no rename (file to dir)
    (False, 'file', '.', '_TMP_file'),
    (False, 'file', './', '_TMP_file'),
    (False, 'file', '..', '_TMP_../file'),
    (False, 'file', '../', '_TMP_../file'),
    (False, 'file', '/', '/file'),
    (False, 'file', '/dir/', '/dir/file'),
    (False, 'file', 'dir/', '_TMP_dir/file'),
    (False, 'file', 'dir/sub/', '_TMP_dir/sub/file'),
    (False, 'file', '/dir/sub/', '/dir/sub/file'),
    # rename (file to file)
    (False, 'file', 'file2', '_TMP_file2'),
    (False, 'file', './file2', '_TMP_file2'),
    (False, 'file', '/file2', '/file2'),
    (False, 'file', '/dir/file2', '/dir/file2'),
    (False, 'file', 'dir/file2', '_TMP_dir/file2'),
    # remote has directory, no rename
    (False, 'dir/file', '.', '_TMP_dir/file'),
    (True, 'dir/file', '.', '_TMP_file'),
    (False, 'dir/file', './', '_TMP_dir/file'),
    (True, 'dir/file', './', '_TMP_file'),
    (False, 'dir/file', '..', '_TMP_../dir/file'),
    (True, 'dir/file', '..', '_TMP_../file'),
    (False, 'dir/file', '../', '_TMP_../dir/file'),
    (True, 'dir/file', '../', '_TMP_../file'),
    (False, 'dir/file', '/', '/dir/file'),
    (True, 'dir/file', '/', '/file'),
    (False, 'dir/file', '/dir/', '/dir/dir/file'),
    (True, 'dir/file', '/dir/', '/dir/file'),
    (False, 'dir/file', 'dir/', '_TMP_dir/dir/file'),
    (True, 'dir/file', 'dir/', '_TMP_dir/file'),
    (False, 'dir/file', 'dir/sub/', '_TMP_dir/sub/dir/file'),
    (True, 'dir/file', 'dir/sub/', '_TMP_dir/sub/file'),
    (False, 'dir/file', '/dir/sub/', '/dir/sub/dir/file'),
    (True, 'dir/file', '/dir/sub/', '/dir/sub/file'),
    # remote has directory, rename
    (False, 'dir1/file', 'file2', '_TMP_dir1/file2'),
    (True, 'dir1/file', 'file2', '_TMP_file2'),
    (False, 'dir1/file', './file2', '_TMP_dir1/file2'),
    (True, 'dir1/file', './file2', '_TMP_file2'),
    (False, 'dir1/file', '/file2', '/dir1/file2'),
    (True, 'dir1/file', '/file2', '/file2'),
    (False, 'dir1/file', '/dir2/file2', '/dir2/dir1/file2'),
    (True, 'dir1/file', '/dir2/file2', '/dir2/file2'),
    (False, 'dir1/file', 'dir2/file2', '_TMP_dir2/dir1/file2'),
    (True, 'dir1/file', 'dir2/file2', '_TMP_dir2/file2'),
])
def test__remote_path_to_local(
        flatten, remote, destination, x_local_path, nexus_mock_client, tmpdir):
    """
    Ensure the method correctly resolves a remote path to a local destination,
    following the instance setting for flatten.
    """
    nexus = nexus_mock_client
    FLATTEN = flatten
    # add cwd to expected result as the fixture gives it as relative but the
    # method always returns an absolute path
    if x_local_path.find('_TMP_') == 0:
        x_local_path = x_local_path.replace('_TMP_', str(tmpdir) + os.path.sep)

    with tmpdir.as_cwd():
        local_path = nexus._remote_path_to_local(
            remote, destination, FLATTEN, create=False)

    assert local_path == os.path.realpath(x_local_path)


@pytest.mark.parametrize('is_dst_dir, flatten',
                         itertools.product((False, True), (False, True)))
def test__remote_path_to_create(
        flatten, is_dst_dir, nexus_mock_client, tmpdir):
    """
    Ensure the method correctly resolves a remote path to a local destination,
    following the instance setting for flatten.
    """
    nexus = nexus_mock_client
    fake = Faker()
    # use a relative path as destination; another test covers abs/rel paths
    local_dst = fake.file_path(depth=fake.random_int(2, 10))[1:]

    assert_type = os.path.isfile
    if is_dst_dir:
        assert_type = os.path.isdir
        local_dst += nexus._local_sep

    FLATTEN = flatten
    with tmpdir.as_cwd():
        local_path = nexus._remote_path_to_local(
            'a', local_dst, flatten=FLATTEN, create=True)

        assert assert_type(local_dst)
        assert os.path.isfile(local_path)


@pytest.mark.integration
@pytest.mark.parametrize('dest_dir_end, flatten, nocache',
                         itertools.product(
                             ('download',
                              'download/',
                              'download/intermediate/.',
                              'download/intermediate/..'),
                             (False, True),
                             (False, True)))
def test_download_tree(
        nexus_client, deep_file_tree, dest_dir_end,
        flatten, faker, nocache, tmpdir):
    """
    Create a repository, upload a random file tree to Nexus, download the
    same files and check if expected files are downloaded.

    Ensure that the download works for the destination specified in
    different formats.
    """
    src_dir, x_file_set = deep_file_tree
    repo = faker.pystr()
    dst_dir = faker.uri_path() + '/'
    path = dst_dir[:-1] + src_dir

    argv = ('repo create hosted raw {}'.format(repo)).split()
    pytest.helpers.create_and_inspect(nexus_client, argv, repo)
    nexus_client.repositories.refresh()

    count_uploaded = nexus_client.upload_directory(src_dir, repo, dst_dir)
    file_set_uploaded = pytest.helpers.repo_list(
        nexus_client, repo, count_uploaded, path)

    download_dest = '{}{}{}'.format(str(tmpdir), os.path.sep, dest_dir_end)
    source_path = '{repo}/{dst_dir}'.format(**locals())
    count_downloaded = nexus_client.download(
        source_path, download_dest, flatten=flatten, nocache=nocache)

    assert count_uploaded == count_downloaded

# ===== satchmo/accounts/urls.py | repo: sankroh/satchmo | commit e48df0c2a4be4ce14785d0a5d6dd1e516c57a838 | license: BSD-3-Clause =====

"""
URLConf for Django user registration.
Recommended usage is to use a call to ``include()`` in your project's
root URLConf to include this URLConf for any URL beginning with
'/accounts/'.
"""
from django.conf.urls.defaults import *
from satchmo.configuration import config_value
# extending the urls in contacts
from satchmo.contact.urls import urlpatterns
# Activation keys get matched by \w+ instead of the more specific
# [a-fA-F0-9]+ because a bad activation key should still get to the view;
# that way it can return a sensible "invalid key" message instead of a
# confusing 404.
urlpatterns += patterns('satchmo.accounts.views',
    (r'^activate/(?P<activation_key>\w+)/$', 'activate', {}, 'registration_activate'),
    (r'^login/$', 'emaillogin', {'template_name': 'registration/login.html'}, 'auth_login'),
    (r'^logout/$', 'shop_logout', {}, 'auth_logout'),
    (r'^register/$', 'register', {}, 'registration_register'),
    (r'^secure/login/$', 'emaillogin', {'SSL': True, 'template_name': 'registration/login.html'}, 'auth_secure_login'),
)

verify = (config_value('SHOP', 'ACCOUNT_VERIFICATION') == 'EMAIL')

urlpatterns += patterns('django.views.generic',
    (r'^register/complete/$', 'simple.direct_to_template',
        {'template': 'registration/registration_complete.html',
         'extra_context': {'verify': verify}},
        'registration_complete'),
)

# Dictionary for authentication views
password_reset_dict = {
    'template_name': 'registration/password_reset_form.html',
    'email_template_name': 'registration/password_reset.txt',
}

# the "from email" in password reset is problematic... it is hard coded as None
urlpatterns += patterns('django.contrib.auth.views',
    (r'^password_reset/$', 'password_reset', password_reset_dict, 'auth_password_reset'),
    (r'^password_reset/done/$', 'password_reset_done', {'template_name': 'registration/password_reset_done.html'}, 'auth_password_reset_done'),
    (r'^password_change/$', 'password_change', {'template_name': 'registration/password_change_form.html'}, 'auth_password_change'),
    (r'^password_change/done/$', 'password_change_done', {'template_name': 'registration/password_change_done.html'}, 'auth_change_done'),
    (r'^reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$', 'password_reset_confirm'),
    (r'^reset/done/$', 'password_reset_complete'),
)

# ===== models/BrowserModel.py | repo: lunarca/fngrpt | commit 11b06456d73502d3ad178520742848c27f074160 | license: Apache-2.0 =====

# -*- coding: utf-8 -*-
from sqlalchemy import Column, ForeignKey, Integer
from sqlalchemy.types import String
from sqlalchemy.orm import relationship, backref
from models import dbsession
from models.BaseModels import DatabaseObject, generate_uuid


class Browser(DatabaseObject):

    uuid = Column(String(32), unique=True, default=generate_uuid)

    _name = Column(String(32))
    _version = Column(String(32))
    _codename = Column(String(32))
    _platform = Column(String(32))
    _user_agent = Column(String(64))
    _oscpu = Column(String(32))

    # Belongs to Target
    target_id = Column(Integer, ForeignKey('target.id'), nullable=False)
    target = relationship("Target", backref=backref("browser", lazy="select"))

    @classmethod
    def all(cls):
        return dbsession.query(cls).all()

    @classmethod
    def by_id(cls, _id):
        return dbsession.query(cls).filter_by(id=_id).first()

    @classmethod
    def by_uuid(cls, _uuid):
        return dbsession.query(cls).filter_by(uuid=_uuid).first()

    # Properties
    @property
    def name(self):
        return self._name

    @name.setter
    def name(self, value):
        self._name = str(value[:32])

    @property
    def version(self):
        return self._version

    @version.setter
    def version(self, value):
        self._version = str(value[:32])

    @property
    def codename(self):
        return self._codename

    @codename.setter
    def codename(self, value):
        self._codename = str(value[:32])

    @property
    def platform(self):
        return self._platform

    @platform.setter
    def platform(self, value):
        self._platform = str(value[:32])

    @property
    def user_agent(self):
        return self._user_agent

    @user_agent.setter
    def user_agent(self, value):
        self._user_agent = str(value[:64])

    @property
    def oscpu(self):
        return self._oscpu

    @oscpu.setter
    def oscpu(self, value):
        self._oscpu = str(value[:32])
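
# A minimal usage sketch (assumes a configured dbsession and an existing
# Target row; the attribute values shown are hypothetical):
#
#   browser = Browser(target_id=target.id)
#   browser.user_agent = raw_user_agent  # setter truncates to 64 chars
#   browser.platform = raw_platform      # setters truncate to 32 chars
#   dbsession.add(browser)
#   dbsession.commit()
#   same_row = Browser.by_uuid(browser.uuid)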

# ===== server/accounts/views.py | repo: paulu/opensurfaces | commit 7f3e987560faa62cd37f821760683ccd1e053c7c | license: MIT =====

from django.conf import settings
from django.contrib.admin.views.decorators import staff_member_required
from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
import account.views
from accounts.models import UserProfile
from accounts.forms import SignupForm, LoginUsernameOrEmailForm


@staff_member_required
def admin_shell(request):
    return render(request, "admin_shell.html", {
        'ADMIN_SHELL_PASSWORD': settings.ADMIN_SHELL_PASSWORD,
        'ADMIN_SHELL_PORT': settings.ADMIN_SHELL_PORT,
        'SERVER_IP': settings.SERVER_IP
    })


class SignupView(account.views.SignupView):
    form_class = SignupForm


class SignupViewAjax(account.views.SignupView):
    form_class = SignupForm
    template_name = 'account/signup_ajax.html'


class LoginView(account.views.LoginView):
    form_class = LoginUsernameOrEmailForm


class LoginViewAjax(account.views.LoginView):
    form_class = LoginUsernameOrEmailForm
    template_name = 'account/login_ajax.html'


## Old, unused stuff:

class UserProfileListView(ListView):
    model = UserProfile

    def get_context_data(self, **kwargs):
        context = super(UserProfileListView, self).get_context_data(**kwargs)
        #context['now'] = timezone.now()
        return context


class UserProfileDetailView(DetailView):
    model = UserProfile

    def get_context_data(self, **kwargs):
        context = super(UserProfileDetailView, self).get_context_data(**kwargs)
        #context['now'] = timezone.now()
        return context

# ===== rubric_sampling/experiments/rubric_utils/load_params.py | repo: YangAzure/rubric-sampling-public | commit 24e8c6bc154633566f93a20661c67484029c3591 | license: MIT =====

from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import os
import cPickle
import numpy as np
from ..utils import DATASETS_ROOT


def get_label_params(problem_id):
    r"""Load constants for a particular problem.

    @param problem_id: integer
                       1|2|3|4|5|6|7|8
    """
    if problem_id == 1:
        from .p1_utils import IX_TO_LABEL, LOOP_LABELS_IX, GEOMETRY_LABELS_IX
    elif problem_id == 2:
        from .p2_utils import IX_TO_LABEL, LOOP_LABELS_IX, GEOMETRY_LABELS_IX
    elif problem_id == 3:
        from .p3_utils import IX_TO_LABEL, LOOP_LABELS_IX, GEOMETRY_LABELS_IX
    elif problem_id == 4:
        from .p4_utils import IX_TO_LABEL, LOOP_LABELS_IX, GEOMETRY_LABELS_IX
    elif problem_id == 5:
        from .p5_utils import IX_TO_LABEL, LOOP_LABELS_IX, GEOMETRY_LABELS_IX
    elif problem_id == 6:
        from .p6_utils import IX_TO_LABEL, LOOP_LABELS_IX, GEOMETRY_LABELS_IX
    elif problem_id == 7:
        from .p7_utils import IX_TO_LABEL, LOOP_LABELS_IX, GEOMETRY_LABELS_IX
    elif problem_id == 8:
        from .p8_utils import IX_TO_LABEL, LOOP_LABELS_IX, GEOMETRY_LABELS_IX

    LABEL_TO_IX = {v: k for k, v in IX_TO_LABEL.iteritems()}
    N_LABELS = len(IX_TO_LABEL.keys())

    return N_LABELS, IX_TO_LABEL, LABEL_TO_IX, LOOP_LABELS_IX, GEOMETRY_LABELS_IX


def get_pcfg_params(problem_id, author='teacher', random=False):
    r"""Return parameters for a PCFG for any problem.

    @param problem_id: integer
                       1|2|3|4|5|6|7|8
    @param author: string [default: teacher]
                   teacher|student
                   use PCFG written by a professor in computer science (teacher)
                   or an undergraduate teaching assistant (student)
    @param random: boolean [default: False]
                   use random parameters for PCFG; otherwise use the expert
                   parameters chosen by the author
    """
    if problem_id == 1:
        from .p1_utils import STUDENT_PCFG_PARAMS, TEACHER_PCFG_PARAMS
    elif problem_id == 2:
        from .p2_utils import STUDENT_PCFG_PARAMS, TEACHER_PCFG_PARAMS
    elif problem_id == 3:
        from .p3_utils import STUDENT_PCFG_PARAMS, TEACHER_PCFG_PARAMS
    elif problem_id == 4:
        from .p4_utils import STUDENT_PCFG_PARAMS, TEACHER_PCFG_PARAMS
    elif problem_id == 5:
        from .p5_utils import STUDENT_PCFG_PARAMS, TEACHER_PCFG_PARAMS
    elif problem_id == 6:
        from .p6_utils import STUDENT_PCFG_PARAMS, TEACHER_PCFG_PARAMS
    elif problem_id == 7:
        from .p7_utils import STUDENT_PCFG_PARAMS, TEACHER_PCFG_PARAMS
    elif problem_id == 8:
        from .p8_utils import STUDENT_PCFG_PARAMS, TEACHER_PCFG_PARAMS

    if author == 'teacher':
        return np.random.rand(len(TEACHER_PCFG_PARAMS)) if random else TEACHER_PCFG_PARAMS
    elif author == 'student':
        return np.random.rand(len(STUDENT_PCFG_PARAMS)) if random else STUDENT_PCFG_PARAMS
    else:
        raise Exception('author %s not supported.' % author)
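
# A minimal usage sketch (assumes the per-problem utils modules are importable;
# illustrative only):
#
#   theta = get_pcfg_params(1, author='teacher', random=False)  # expert weights
#   theta0 = get_pcfg_params(1, author='teacher', random=True)  # random init of the same length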


def get_pcfg_path(problem_id, author='teacher'):
    r"""Return path to PCFG for any problem.

    @param problem_id: integer
                       1|2|3|4|5|6|7|8
    @param author: string [default: teacher]
                   teacher|student
                   use PCFG written by a professor in computer science (teacher)
                   or an undergraduate teaching assistant (student)
    """
    if problem_id == 1:
        from .p1_utils import STUDENT_PCFG_PATH, TEACHER_PCFG_PATH
    elif problem_id == 2:
        from .p2_utils import STUDENT_PCFG_PATH, TEACHER_PCFG_PATH
    elif problem_id == 3:
        from .p3_utils import STUDENT_PCFG_PATH, TEACHER_PCFG_PATH
    elif problem_id == 4:
        from .p4_utils import STUDENT_PCFG_PATH, TEACHER_PCFG_PATH
    elif problem_id == 5:
        from .p5_utils import STUDENT_PCFG_PATH, TEACHER_PCFG_PATH
    elif problem_id == 6:
        from .p6_utils import STUDENT_PCFG_PATH, TEACHER_PCFG_PATH
    elif problem_id == 7:
        from .p7_utils import STUDENT_PCFG_PATH, TEACHER_PCFG_PATH
    elif problem_id == 8:
        from .p8_utils import STUDENT_PCFG_PATH, TEACHER_PCFG_PATH

    return TEACHER_PCFG_PATH if author == 'teacher' else STUDENT_PCFG_PATH


def get_codeorg_data_root(problem_id, dataset='unlabeled'):
    r"""Return path to folder containing data.

    @param problem_id: integer
                       1|2|3|4|5|6|7|8
    @param dataset: string
                    unlabeled|annotated|synthetic|raw
    """
    return os.path.join(DATASETS_ROOT, 'codeorg', dataset, 'p%d' % problem_id)


def get_length_distribution(problem_id):
    data_root = get_codeorg_data_root(problem_id, dataset='unlabeled')
    train_path = os.path.join(data_root, 'train.pickle')

    with open(train_path, 'rb') as fp:  # binary mode is safer for pickled data
        data = cPickle.load(fp)

    data = data['programs']
    counts = []
    for row in data:
        row = row.replace('\n', '')
        words = row.split()
        counts.append(len(words))
    counts = np.array(counts)

    return np.bincount(counts)


def get_max_seq_len(problem_id):
    length_dist = get_length_distribution(problem_id)
    max_seq_len = np.sum(np.cumsum(length_dist) <= np.sum(length_dist) * 0.999)
    return max_seq_len
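
# A worked example of the 99.9% cutoff in get_max_seq_len (illustrative only):
# for token counts [1, 1, 2, 3], length_dist = np.bincount(...) = [0, 2, 1, 1],
# np.cumsum(length_dist) = [0, 2, 3, 4], and np.sum(length_dist) * 0.999 = 3.996,
# so max_seq_len = np.sum([0, 2, 3, 4] <= 3.996) = 3, i.e. the number of leading
# length bins whose cumulative count stays within 99.9% of all programs.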

# ===== docs/ext/credmark_autosummary/__init__.py | repo: credmark/credmark-model-sdk-py | commit 5a649d41738bb9f033792328aef56f0d4b91b7fd | license: MIT =====

"""Extension that adds an autosummary:: directive.
The directive can be used to generate function/method/attribute/etc. summary
lists, similar to those output eg. by Epydoc and other API doc generation tools.
An :autolink: role is also provided.
autosummary directive
---------------------
The autosummary directive has the form::
.. autosummary::
:nosignatures:
:toctree: generated/
module.function_1
module.function_2
...
and it generates an output table (containing signatures, optionally)
======================== =============================================
module.function_1(args) Summary line from the docstring of function_1
module.function_2(args) Summary line from the docstring
...
======================== =============================================
If the :toctree: option is specified, files matching the function names
are inserted to the toctree with the given prefix:
generated/module.function_1
generated/module.function_2
...
Note: The file names contain the module:: or currentmodule:: prefixes.
.. seealso:: autosummary_generate.py
autolink role
-------------
The autolink role functions as ``:obj:`` when the name referred can be
resolved to a Python object, and otherwise it becomes simple emphasis.
This can be used as the default role to make links 'smart'.
"""
import inspect
import os
import posixpath
import re
import sys
import warnings
from inspect import Parameter
from os import path
from types import ModuleType
from typing import Any, Dict, List, Optional, Sequence, Tuple, Type, cast
from docutils import nodes
from docutils.nodes import Element, Node, system_message
from docutils.parsers.rst import directives
from docutils.parsers.rst.states import RSTStateMachine, Struct, state_classes
from docutils.statemachine import StringList
import sphinx
from sphinx import addnodes
from sphinx.application import Sphinx
from sphinx.config import Config
from sphinx.deprecation import (RemovedInSphinx50Warning, RemovedInSphinx60Warning,
                                deprecated_alias)
from sphinx.environment import BuildEnvironment
from sphinx.environment.adapters.toctree import TocTree
from sphinx.ext.autodoc import INSTANCEATTR, Documenter
from sphinx.ext.autodoc.directive import DocumenterBridge, Options
from sphinx.ext.autodoc.importer import import_module
from sphinx.ext.autodoc.mock import mock
from sphinx.locale import __
from sphinx.project import Project
from sphinx.pycode import ModuleAnalyzer, PycodeError
from sphinx.registry import SphinxComponentRegistry
from sphinx.util import logging, rst
from sphinx.util.docutils import (NullReporter, SphinxDirective, SphinxRole, new_document,
                                  switch_source_input)
from sphinx.util.inspect import signature_from_str
from sphinx.util.matching import Matcher
from sphinx.util.typing import OptionSpec
from sphinx.writers.html import HTMLTranslator
logger = logging.getLogger(__name__)
periods_re = re.compile(r'\.(?:\s+)')
literal_re = re.compile(r'::\s*$')
WELL_KNOWN_ABBREVIATIONS = ('et al.', ' i.e.',)
# -- autosummary_toc node ------------------------------------------------------
class autosummary_toc(nodes.comment):
    pass


def process_autosummary_toc(app: Sphinx, doctree: nodes.document) -> None:
    """Insert items described in autosummary:: to the TOC tree, but do
    not generate the toctree:: list.
    """
    warnings.warn('process_autosummary_toc() is deprecated',
                  RemovedInSphinx50Warning, stacklevel=2)
    env = app.builder.env
    crawled = {}

    def crawl_toc(node: Element, depth: int = 1) -> None:
        crawled[node] = True
        for subnode in node:
            try:
                if (isinstance(subnode, autosummary_toc) and
                        isinstance(subnode[0], addnodes.toctree)):
                    TocTree(env).note(env.docname, subnode[0])
                    continue
            except IndexError:
                continue
            if not isinstance(subnode, nodes.section):
                continue
            if subnode not in crawled:
                crawl_toc(subnode, depth + 1)
    crawl_toc(doctree)


def autosummary_toc_visit_html(self: nodes.NodeVisitor, node: autosummary_toc) -> None:
    """Hide autosummary toctree list in HTML output."""
    raise nodes.SkipNode


def autosummary_noop(self: nodes.NodeVisitor, node: Node) -> None:
    pass


# -- autosummary_table node ----------------------------------------------------

class autosummary_table(nodes.comment):
    pass


def autosummary_table_visit_html(self: HTMLTranslator, node: autosummary_table) -> None:
    """Make the first column of the table non-breaking."""
    try:
        table = cast(nodes.table, node[0])
        tgroup = cast(nodes.tgroup, table[0])
        tbody = cast(nodes.tbody, tgroup[-1])
        rows = cast(List[nodes.row], tbody)
        for row in rows:
            col1_entry = cast(nodes.entry, row[0])
            par = cast(nodes.paragraph, col1_entry[0])
            for j, subnode in enumerate(list(par)):
                if isinstance(subnode, nodes.Text):
                    new_text = subnode.astext().replace(" ", "\u00a0")
                    par[j] = nodes.Text(new_text)
    except IndexError:
        pass
# -- autodoc integration -------------------------------------------------------
# deprecated_alias('sphinx.ext.autosummary',
#                  {
#                      '_app': None,
#                  },
#                  RemovedInSphinx60Warning,
#                  {
#                  })


class FakeApplication:
    def __init__(self):
        self.doctreedir = None
        self.events = None
        self.extensions = {}
        self.srcdir = None
        self.config = Config()
        self.project = Project(None, None)
        self.registry = SphinxComponentRegistry()


class FakeDirective(DocumenterBridge):
    def __init__(self) -> None:
        settings = Struct(tab_width=8)
        document = Struct(settings=settings)
        app = FakeApplication()
        app.config.add('autodoc_class_signature', 'mixed', True, None)
        env = BuildEnvironment(app)  # type: ignore
        state = Struct(document=document)
        super().__init__(env, None, Options(), 0, state)


def get_documenter(app: Sphinx, obj: Any, parent: Any) -> Type[Documenter]:
    """Get an autodoc.Documenter class suitable for documenting the given
    object.

    *obj* is the Python object to be documented, and *parent* is another
    Python object (e.g. a module or a class) to which *obj* belongs.
    """
    from sphinx.ext.autodoc import DataDocumenter, ModuleDocumenter

    if inspect.ismodule(obj):
        # ModuleDocumenter.can_document_member always returns False
        return ModuleDocumenter

    # Construct a fake documenter for *parent*
    if parent is not None:
        parent_doc_cls = get_documenter(app, parent, None)
    else:
        parent_doc_cls = ModuleDocumenter

    if hasattr(parent, '__name__'):
        parent_doc = parent_doc_cls(FakeDirective(), parent.__name__)
    else:
        parent_doc = parent_doc_cls(FakeDirective(), "")

    # Get the correct documenter class for *obj*
    classes = [cls for cls in app.registry.documenters.values()
               if cls.can_document_member(obj, '', False, parent_doc)]
    if classes:
        classes.sort(key=lambda cls: cls.priority)
        return classes[-1]
    else:
        return DataDocumenter


# -- .. autosummary:: ----------------------------------------------------------

class Autosummary(SphinxDirective):
    """
    Pretty table containing short signatures and summaries of functions etc.

    autosummary can also optionally generate a hidden toctree:: node.
    """
    required_arguments = 0
    optional_arguments = 0
    final_argument_whitespace = False
    has_content = True
    option_spec: OptionSpec = {
        'caption': directives.unchanged_required,
        'toctree': directives.unchanged,
        'nosignatures': directives.flag,
        'recursive': directives.flag,
        'template': directives.unchanged,
    }

    def run(self) -> List[Node]:
        self.bridge = DocumenterBridge(self.env, self.state.document.reporter,
                                       Options(), self.lineno, self.state)

        names = [x.strip().split()[0] for x in self.content
                 if x.strip() and re.search(r'^[~a-zA-Z_]', x.strip()[0])]
        items = self.get_items(names)
        nodes = self.get_table(items)

        if 'toctree' in self.options:
            dirname = posixpath.dirname(self.env.docname)

            tree_prefix = self.options['toctree'].strip()
            docnames = []
            excluded = Matcher(self.config.exclude_patterns)
            filename_map = self.config.autosummary_filename_map
            for _name, _sig, _summary, real_name in items:
                real_name = filename_map.get(real_name, real_name)
                docname = posixpath.join(tree_prefix, real_name)
                docname = posixpath.normpath(posixpath.join(dirname, docname))
                if docname not in self.env.found_docs:
                    if excluded(self.env.doc2path(docname, None)):
                        msg = __('autosummary references excluded document %r. Ignored.')
                    else:
                        msg = __('autosummary: stub file not found %r. '
                                 'Check your autosummary_generate setting.')
                    logger.warning(msg, real_name, location=self.get_location())
                    continue
                docnames.append(docname)

            if docnames:
                tocnode = addnodes.toctree()
                tocnode['includefiles'] = docnames
                tocnode['entries'] = [(None, docn) for docn in docnames]
                tocnode['maxdepth'] = -1
                tocnode['glob'] = None
                tocnode['caption'] = self.options.get('caption')

                nodes.append(autosummary_toc('', '', tocnode))

        if 'toctree' not in self.options and 'caption' in self.options:
            logger.warning(__('A captioned autosummary requires :toctree: option. ignored.'),
                           location=nodes[-1])

        return nodes

    def import_by_name(self, name: str, prefixes: List[str]) -> Tuple[str, Any, Any, str]:
        with mock(self.config.autosummary_mock_imports):
            try:
                return import_by_name(name, prefixes, grouped_exception=True)
            except ImportExceptionGroup as exc:
                # check existence of instance attribute
                try:
                    return import_ivar_by_name(name, prefixes)
                except ImportError as exc2:
                    if exc2.__cause__:
                        errors: List[BaseException] = exc.exceptions + [exc2.__cause__]
                    else:
                        errors = exc.exceptions + [exc2]

                    raise ImportExceptionGroup(exc.args[0], errors)

    def create_documenter(self, app: Sphinx, obj: Any,
                          parent: Any, full_name: str) -> "Documenter":
        """Get an autodoc.Documenter class suitable for documenting the given
        object.

        Wraps get_documenter and is meant as a hook for extensions.
        """
        doccls = get_documenter(app, obj, parent)
        return doccls(self.bridge, full_name)

    def get_items(self, names: List[str]) -> List[Tuple[str, str, str, str]]:
        """Try to import the given names, and return a list of
        ``[(name, signature, summary_string, real_name), ...]``.
        """
        prefixes = get_import_prefixes_from_env(self.env)

        items: List[Tuple[str, str, str, str]] = []

        max_item_chars = 50

        for name in names:
            display_name = name
            if name.startswith('~'):
                name = name[1:]
                display_name = name.split('.')[-1]

            try:
                real_name, obj, parent, modname = self.import_by_name(name, prefixes=prefixes)
            except ImportExceptionGroup as exc:
                errors = list(set("* %s: %s" % (type(e).__name__, e) for e in exc.exceptions))
                logger.warning(__('autosummary: failed to import %s.\nPossible hints:\n%s'),
                               name, '\n'.join(errors), location=self.get_location())
                continue

            self.bridge.result = StringList()  # initialize for each documenter
            full_name = real_name
            if not isinstance(obj, ModuleType):
                # give explicitly separated module name, so that members
                # of inner classes can be documented
                full_name = modname + '::' + full_name[len(modname) + 1:]
            # NB. using full_name here is important, since Documenters
            #     handle module prefixes slightly differently
            documenter = self.create_documenter(self.env.app, obj, parent, full_name)
            if not documenter.parse_name():
                logger.warning(__('failed to parse name %s'), real_name,
                               location=self.get_location())
                items.append((display_name, '', '', real_name))
                continue
            if not documenter.import_object():
                logger.warning(__('failed to import object %s'), real_name,
                               location=self.get_location())
                items.append((display_name, '', '', real_name))
                continue

            # try to also get a source code analyzer for attribute docs
            try:
                documenter.analyzer = ModuleAnalyzer.for_module(
                    documenter.get_real_modname())
                # parse right now, to get PycodeErrors on parsing (results will
                # be cached anyway)
                documenter.analyzer.find_attr_docs()
            except PycodeError as err:
                logger.debug('[autodoc] module analyzer failed: %s', err)
                # no source file -- e.g. for builtin and C modules
                documenter.analyzer = None

            # -- Grab the signature

            try:
                sig = documenter.format_signature(show_annotation=False)
            except TypeError:
                # the documenter does not support ``show_annotation`` option
                sig = documenter.format_signature()

            if not sig:
                sig = ''
            else:
                max_chars = max(10, max_item_chars - len(display_name))
                sig = mangle_signature(sig, max_chars=max_chars)

            # -- Grab the summary

            documenter.add_content(None)
            summary = extract_summary(self.bridge.result.data[:], self.state.document)

            items.append((display_name, sig, summary, real_name))

        return items

    def get_table(self, items: List[Tuple[str, str, str, str]]) -> List[Node]:
        """Generate a proper list of table nodes for autosummary:: directive.

        *items* is a list produced by :meth:`get_items`.
        """
        table_spec = addnodes.tabular_col_spec()
        table_spec['spec'] = r'\X{1}{2}\X{1}{2}'

        table = autosummary_table('')
        real_table = nodes.table('', classes=['autosummary longtable'])
        table.append(real_table)
        group = nodes.tgroup('', cols=2)
        real_table.append(group)
        group.append(nodes.colspec('', colwidth=10))
        group.append(nodes.colspec('', colwidth=90))
        body = nodes.tbody('')
        group.append(body)

        def append_row(*column_texts: str) -> None:
            row = nodes.row('')
            source, line = self.state_machine.get_source_and_line()
            for text in column_texts:
                node = nodes.paragraph('')
                vl = StringList()
                vl.append(text, '%s:%d:<autosummary>' % (source, line))
                with switch_source_input(self.state, vl):
                    self.state.nested_parse(vl, 0, node)
                    try:
                        if isinstance(node[0], nodes.paragraph):
                            node = node[0]
                    except IndexError:
                        pass
                row.append(nodes.entry('', node))
body.append(row)
for name, sig, summary, real_name in items:
qualifier = 'obj'
if 'nosignatures' not in self.options:
col1 = ':py:%s:`%s <%s>`\\ %s' % (qualifier, name, real_name, rst.escape(sig))
else:
col1 = ':py:%s:`%s <%s>`' % (qualifier, name, real_name)
col2 = summary
append_row(col1, col2)
return [table_spec, table]
def strip_arg_typehint(s: str) -> str:
"""Strip a type hint from argument definition."""
return s.split(':')[0].strip()
def _cleanup_signature(s: str) -> str:
"""Clean up signature using inspect.signautre() for mangle_signature()"""
try:
sig = signature_from_str(s)
parameters = list(sig.parameters.values())
for i, param in enumerate(parameters):
if param.annotation is not Parameter.empty:
# Remove typehints
param = param.replace(annotation=Parameter.empty)
if param.default is not Parameter.empty:
# Replace default value by "None"
param = param.replace(default=None)
parameters[i] = param
sig = sig.replace(parameters=parameters, return_annotation=Parameter.empty)
return str(sig)
except Exception:
# Return the original signature string if failed to clean (ex. parsing error)
return s
def mangle_signature(sig: str, max_chars: int = 30) -> str:
"""Reformat a function signature to a more compact form."""
s = _cleanup_signature(sig)
# Strip return type annotation
s = re.sub(r"\)\s*->\s.*$", ")", s)
# Remove parenthesis
s = re.sub(r"^\((.*)\)$", r"\1", s).strip()
# Strip literals (which can contain things that confuse the code below)
s = re.sub(r"\\\\", "", s) # escaped backslash (maybe inside string)
s = re.sub(r"\\'", "", s) # escaped single quote
s = re.sub(r'\\"', "", s) # escaped double quote
s = re.sub(r"'[^']*'", "", s) # string literal (w/ single quote)
s = re.sub(r'"[^"]*"', "", s) # string literal (w/ double quote)
# Strip complex objects (maybe default value of arguments)
while re.search(r'\([^)]*\)', s): # contents of parenthesis (ex. NamedTuple(attr=...))
s = re.sub(r'\([^)]*\)', '', s)
while re.search(r'<[^>]*>', s): # contents of angle brackets (ex. <object>)
s = re.sub(r'<[^>]*>', '', s)
while re.search(r'{[^}]*}', s): # contents of curly brackets (ex. dict)
s = re.sub(r'{[^}]*}', '', s)
# Parse the signature to arguments + options
args: List[str] = []
opts: List[str] = []
opt_re = re.compile(r"^(.*, |)([a-zA-Z0-9_*]+)\s*=\s*")
while s:
m = opt_re.search(s)
if not m:
# The rest are arguments
args = s.split(', ')
break
opts.insert(0, m.group(2))
s = m.group(1)[:-2]
# Strip typehints
for i, arg in enumerate(args):
args[i] = strip_arg_typehint(arg)
for i, opt in enumerate(opts):
opts[i] = strip_arg_typehint(opt)
# Produce a more compact signature
sig = limited_join(", ", args, max_chars=max_chars - 2)
if opts:
if not sig:
sig = "[%s]" % limited_join(", ", opts, max_chars=max_chars - 4)
elif len(sig) < max_chars - 4 - 2 - 3:
sig += "[, %s]" % limited_join(", ", opts,
max_chars=max_chars - len(sig) - 4 - 2)
return "(%s)" % sig
def extract_summary(doc: List[str], document: Any) -> str:
"""Extract summary from docstring."""
def parse(doc: List[str], settings: Any) -> nodes.document:
state_machine = RSTStateMachine(state_classes, 'Body')
node = new_document('', settings)
node.reporter = NullReporter()
state_machine.run(doc, node)
return node
    # Skip blank lines at the top
while doc and not doc[0].strip():
doc.pop(0)
# If there's a blank line, then we can assume the first sentence /
# paragraph has ended, so anything after shouldn't be part of the
# summary
for i, piece in enumerate(doc):
if not piece.strip():
doc = doc[:i]
break
if doc == []:
return ''
# parse the docstring
node = parse(doc, document.settings)
if isinstance(node[0], nodes.section):
# document starts with a section heading, so use that.
summary = node[0].astext().strip()
elif not isinstance(node[0], nodes.paragraph):
# document starts with non-paragraph: pick up the first line
summary = doc[0].strip()
else:
# Try to find the "first sentence", which may span multiple lines
sentences = periods_re.split(" ".join(doc))
if len(sentences) == 1:
summary = sentences[0].strip()
else:
summary = ''
for i in range(len(sentences)):
summary = ". ".join(sentences[:i + 1]).rstrip(".") + "."
node[:] = []
node = parse(doc, document.settings)
if summary.endswith(WELL_KNOWN_ABBREVIATIONS):
pass
elif not any(node.findall(nodes.system_message)):
                # splitting by period did not break inline markup, so accept this summary
break
# strip literal notation mark ``::`` from tail of summary
summary = literal_re.sub('.', summary)
return summary
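# Illustrative sketch (not part of the original source), assuming *document*
# is any docutils document supplying parser settings:
#     >>> extract_summary(["Do a thing. More detail follows."], document)
#     'Do a thing.'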
def limited_join(sep: str, items: List[str], max_chars: int = 30,
overflow_marker: str = "...") -> str:
"""Join a number of strings into one, limiting the length to *max_chars*.
If the string overflows this limit, replace the last fitting item by
*overflow_marker*.
Returns: joined_string
"""
full_str = sep.join(items)
if len(full_str) < max_chars:
return full_str
n_chars = 0
n_items = 0
for item in items:
n_chars += len(item) + len(sep)
if n_chars < max_chars - len(overflow_marker):
n_items += 1
else:
break
return sep.join(list(items[:n_items]) + [overflow_marker])
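# Illustrative sketch (not part of the original source):
#     >>> limited_join(", ", ["alpha", "beta", "gamma"], max_chars=12)
#     'alpha, ...'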
# -- Importing items -----------------------------------------------------------
class ImportExceptionGroup(Exception):
"""Exceptions raised during importing the target objects.
    It contains an error message and a list of exceptions as its arguments.
"""
def __init__(self, message: Optional[str], exceptions: Sequence[BaseException]):
super().__init__(message)
self.exceptions = list(exceptions)
def get_import_prefixes_from_env(env: BuildEnvironment) -> List[str]:
"""
Obtain current Python import prefixes (for `import_by_name`)
from ``document.env``
"""
prefixes: List[Optional[str]] = [None]
currmodule = env.ref_context.get('py:module')
if currmodule:
prefixes.insert(0, currmodule)
currclass = env.ref_context.get('py:class')
if currclass:
if currmodule:
prefixes.insert(0, currmodule + "." + currclass)
else:
prefixes.insert(0, currclass)
return prefixes
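# Illustrative note (not part of the original source): inside module ``foo``
# and class ``Bar`` the resulting lookup order is ['foo.Bar', 'foo', None],
# i.e. the most specific prefix is tried first by import_by_name().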
def import_by_name(name: str, prefixes: List[Optional[str]] = [None],
                   grouped_exception: bool = False) -> Tuple[str, Any, Any, str]:
"""Import a Python object that has the given *name*, under one of the
*prefixes*. The first name that succeeds is used.
"""
tried = []
errors: List[ImportExceptionGroup] = []
for prefix in prefixes:
try:
if prefix:
prefixed_name = '.'.join([prefix, name])
else:
prefixed_name = name
obj, parent, modname = _import_by_name(prefixed_name, grouped_exception)
return prefixed_name, obj, parent, modname
except ImportError:
tried.append(prefixed_name)
except ImportExceptionGroup as exc:
tried.append(prefixed_name)
errors.append(exc)
if grouped_exception:
exceptions: List[BaseException] = sum((e.exceptions for e in errors), [])
raise ImportExceptionGroup('no module named %s' % ' or '.join(tried), exceptions)
else:
raise ImportError('no module named %s' % ' or '.join(tried))
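# Illustrative note (not part of the original source): with prefixes
# ['mypkg', None], import_by_name('utils.helper', prefixes) first tries to
# import 'mypkg.utils.helper' and then falls back to plain 'utils.helper'.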
def _import_by_name(name: str, grouped_exception: bool = False) -> Tuple[Any, Any, str]:
"""Import a Python object given its full name."""
errors: List[BaseException] = []
try:
name_parts = name.split('.')
        # first, try to interpret `name` as MODNAME.OBJ
modname = '.'.join(name_parts[:-1])
if modname:
try:
mod = import_module(modname)
return getattr(mod, name_parts[-1]), mod, modname
except (ImportError, IndexError, AttributeError) as exc:
errors.append(exc.__cause__ or exc)
# ... then as MODNAME, MODNAME.OBJ1, MODNAME.OBJ1.OBJ2, ...
last_j = 0
modname = None
for j in reversed(range(1, len(name_parts) + 1)):
last_j = j
modname = '.'.join(name_parts[:j])
try:
import_module(modname)
except ImportError as exc:
errors.append(exc.__cause__ or exc)
if modname in sys.modules:
break
if last_j < len(name_parts):
parent = None
obj = sys.modules[modname]
for obj_name in name_parts[last_j:]:
parent = obj
obj = getattr(obj, obj_name)
return obj, parent, modname
else:
return sys.modules[modname], None, modname
except (ValueError, ImportError, AttributeError, KeyError) as exc:
errors.append(exc)
if grouped_exception:
raise ImportExceptionGroup('', errors)
else:
raise ImportError(*exc.args) from exc
def import_ivar_by_name(name: str, prefixes: List[Optional[str]] = [None],
grouped_exception: bool = False) -> Tuple[str, Any, Any, str]:
"""Import an instance variable that has the given *name*, under one of the
*prefixes*. The first name that succeeds is used.
"""
try:
name, attr = name.rsplit(".", 1)
real_name, obj, parent, modname = import_by_name(name, prefixes, grouped_exception)
qualname = real_name.replace(modname + ".", "")
analyzer = ModuleAnalyzer.for_module(getattr(obj, '__module__', modname))
analyzer.analyze()
# check for presence in `annotations` to include dataclass attributes
if (qualname, attr) in analyzer.attr_docs or (qualname, attr) in analyzer.annotations:
return real_name + "." + attr, INSTANCEATTR, obj, modname
except (ImportError, ValueError, PycodeError) as exc:
raise ImportError from exc
except ImportExceptionGroup:
raise # pass through it as is
raise ImportError
# -- :autolink: (smart default role) -------------------------------------------
class AutoLink(SphinxRole):
"""Smart linking role.
Expands to ':obj:`text`' if `text` is an object that can be imported;
otherwise expands to '*text*'.
"""
def run(self) -> Tuple[List[Node], List[system_message]]:
pyobj_role = self.env.get_domain('py').role('obj')
objects, errors = pyobj_role('obj', self.rawtext, self.text, self.lineno,
self.inliner, self.options, self.content)
if errors:
return objects, errors
assert len(objects) == 1
pending_xref = cast(addnodes.pending_xref, objects[0])
try:
# try to import object by name
prefixes = get_import_prefixes_from_env(self.env)
import_by_name(pending_xref['reftarget'], prefixes, grouped_exception=True)
except ImportExceptionGroup:
literal = cast(nodes.literal, pending_xref[0])
objects[0] = nodes.emphasis(self.rawtext, literal.astext(),
classes=literal['classes'])
return objects, errors
def get_rst_suffix(app: Sphinx) -> str:
def get_supported_format(suffix: str) -> Tuple[str, ...]:
parser_class = app.registry.get_source_parsers().get(suffix)
if parser_class is None:
return ('restructuredtext',)
return parser_class.supported
    suffix: Optional[str] = None
for suffix in app.config.source_suffix:
if 'restructuredtext' in get_supported_format(suffix):
return suffix
return None
def process_generate_options(app: Sphinx) -> None:
genfiles = app.config.autosummary_generate
if genfiles is True:
env = app.builder.env
genfiles = [env.doc2path(x, base=None) for x in env.found_docs
if os.path.isfile(env.doc2path(x))]
elif genfiles is False:
pass
else:
ext = list(app.config.source_suffix)
genfiles = [genfile + (ext[0] if not genfile.endswith(tuple(ext)) else '')
for genfile in genfiles]
for entry in genfiles[:]:
if not path.isfile(path.join(app.srcdir, entry)):
logger.warning(__('autosummary_generate: file not found: %s'), entry)
genfiles.remove(entry)
if not genfiles:
return
suffix = get_rst_suffix(app)
if suffix is None:
        logger.warning(__('autosummary generates .rst files internally. '
                          'But your source_suffix does not contain .rst. Skipped.'))
return
# credmark_autosummary change:
from .generate import generate_autosummary_docs
imported_members = app.config.autosummary_imported_members
with mock(app.config.autosummary_mock_imports):
generate_autosummary_docs(genfiles, suffix=suffix, base_path=app.srcdir,
app=app, imported_members=imported_members,
overwrite=app.config.autosummary_generate_overwrite,
encoding=app.config.source_encoding)
def setup(app: Sphinx) -> Dict[str, Any]:
# I need autodoc
app.setup_extension('sphinx.ext.autodoc')
app.add_node(autosummary_toc,
html=(autosummary_toc_visit_html, autosummary_noop),
latex=(autosummary_noop, autosummary_noop),
text=(autosummary_noop, autosummary_noop),
man=(autosummary_noop, autosummary_noop),
texinfo=(autosummary_noop, autosummary_noop))
app.add_node(autosummary_table,
html=(autosummary_table_visit_html, autosummary_noop),
latex=(autosummary_noop, autosummary_noop),
text=(autosummary_noop, autosummary_noop),
man=(autosummary_noop, autosummary_noop),
texinfo=(autosummary_noop, autosummary_noop))
app.add_directive('autosummary', Autosummary)
app.add_role('autolink', AutoLink())
app.connect('builder-inited', process_generate_options)
app.add_config_value('autosummary_context', {}, True)
app.add_config_value('autosummary_filename_map', {}, 'html')
app.add_config_value('autosummary_generate', True, True, [bool, list])
app.add_config_value('autosummary_generate_overwrite', True, False)
app.add_config_value('autosummary_mock_imports',
lambda config: config.autodoc_mock_imports, 'env')
app.add_config_value('autosummary_imported_members', [], False, [bool])
app.add_config_value('autosummary_ignore_module_all', True, 'env', bool)
return {'version': sphinx.__display_version__, 'parallel_read_safe': True}
4dda2477dfbfcdc6ef507f27602b2009d138ba4a | 9,147 | py | Python | fabfile.py | petarmaric/fsm_eigenvalue_experiments | 80842eb2387d38360e78b9051af8c98466a18079 | ["BSD-3-Clause"]
import fnmatch
import os
import shutil
from fabric.api import local, task
from fsm_eigenvalue.load import linspace_with_step
import tables as tb
ANALYSES_TYPES = {
'fsm_damage_analysis': {
'program_args_fmt': "%(results_file)s --report_file %(report_file)s",
'report_file_ext': 'pdf',
'variations': {
'*/*.hdf5': [
{},
],
},
},
'fsm_modal_analysis': {
'program_args_fmt': "%(results_file)s --report_file %(report_file)s",
'report_file_ext': 'pdf',
'variations': {
'*/*.hdf5': [
{},
],
'barbero/*.hdf5': [
{'a-max': 1000.0,},
{'a-max': 1500.0,},
{'t_b-min': 4.0, 't_b-max': 8.0,},
{'t_b-min': 4.0, 't_b-max': 8.0, 'a-min': 2000.0,},
{'t_b-min': 4.0, 't_b-max': 7.0, 'a-min': 200.0, 'a-max': 800.0,},
],
},
},
'fsm_strip_length_analysis': {
'program_args_fmt': "%(results_file)s --report_file %(report_file)s",
'report_file_ext': 'pdf',
'variations': {
'barbero/*.hdf5': [
{'t_b': 6.35, 'markers': 2310.00,},
{'t_b': 6.35, 'add-automatic-markers': '',},
{'t_b': 6.35, 'add-automatic-markers': '', 'a-min': 195.00, 'a-max': 198.00,}, # mode 2 to 3
{'t_b': 6.35, 'add-automatic-markers': '', 'a-min': 276.00, 'a-max': 280.00,}, # mode 3 to 4
{'t_b': 6.35, 'add-automatic-markers': '', 'a-min': 356.00, 'a-max': 362.00,}, # mode 4 to 5
{'t_b': 6.35, 'add-automatic-markers': '', 'a-min': 436.00, 'a-max': 443.00,}, # mode 5 to 6
{'t_b': 6.35, 'add-automatic-markers': '', 'a-min': 516.00, 'a-max': 524.00,}, # mode 6 to 7
{'t_b': 6.35, 'add-automatic-markers': '', 'a-min': 596.00, 'a-max': 605.00,}, # mode 7 to 8
{'t_b': 6.35, 'add-automatic-markers': '', 'a-min': 676.00, 'a-max': 685.00,}, # mode 8 to 9
{'t_b': 6.35, 'add-automatic-markers': '', 'a-min': 756.00, 'a-max': 766.00,}, # mode 9 to 10
{'t_b': 6.35, 'add-automatic-markers': '', 'a-min': 835.00, 'a-max': 847.00,}, # mode 10 to 11
{'t_b': 6.35, 'add-automatic-markers': '', 'a-min': 900.00, 'a-max': 970.00,}, # mode 11 to 1
],
'barbero_mode-transitions/*.hdf5': [
{'t_b': 6.35, 'add-automatic-markers': '',},
],
},
},
'fsm_strip_thickness_analysis': {
'program_args_fmt': "%(results_file)s --report_file %(report_file)s",
'report_file_ext': 'pdf',
'variations': {
'barbero/*.hdf5': [
{'a': 2310, 'add-automatic-markers': '',},
],
'barbero/barbero-elastic.hdf5': [
{'a': 198.0, 'add-automatic-markers': '',}, # mode 2 to 3, as per 'barbero' elastic model
{'a': 279.5, 'add-automatic-markers': '',}, # mode 3 to 4, as per 'barbero' elastic model
{'a': 361.0, 'add-automatic-markers': '',}, # mode 4 to 5, as per 'barbero' elastic model
{'a': 442.0, 'add-automatic-markers': '',}, # mode 5 to 6, as per 'barbero' elastic model
{'a': 523.0, 'add-automatic-markers': '',}, # mode 6 to 7, as per 'barbero' elastic model
{'a': 603.5, 'add-automatic-markers': '',}, # mode 7 to 8, as per 'barbero' elastic model
{'a': 684.5, 'add-automatic-markers': '',}, # mode 8 to 9, as per 'barbero' elastic model
{'a': 765.0, 'add-automatic-markers': '',}, # mode 9 to 10, as per 'barbero' elastic model
{'a': 846.0, 'add-automatic-markers': '',}, # mode 10 to 11, as per 'barbero' elastic model
{'a': 968.5, 'add-automatic-markers': '',}, # mode 11 to 1, as per 'barbero' elastic model
],
'barbero/barbero-viscoelastic.hdf5': [
{'a': 196.0, 'add-automatic-markers': '',}, # mode 2 to 3, as per 'barbero' viscoelastic model
{'a': 276.5, 'add-automatic-markers': '',}, # mode 3 to 4, as per 'barbero' viscoelastic model
{'a': 357.0, 'add-automatic-markers': '',}, # mode 4 to 5, as per 'barbero' viscoelastic model
{'a': 437.5, 'add-automatic-markers': '',}, # mode 5 to 6, as per 'barbero' viscoelastic model
{'a': 517.5, 'add-automatic-markers': '',}, # mode 6 to 7, as per 'barbero' viscoelastic model
{'a': 597.5, 'add-automatic-markers': '',}, # mode 7 to 8, as per 'barbero' viscoelastic model
{'a': 677.5, 'add-automatic-markers': '',}, # mode 8 to 9, as per 'barbero' viscoelastic model
{'a': 757.0, 'add-automatic-markers': '',}, # mode 9 to 10, as per 'barbero' viscoelastic model
{'a': 837.0, 'add-automatic-markers': '',}, # mode 10 to 11, as per 'barbero' viscoelastic model
{'a': 903.0, 'add-automatic-markers': '',}, # mode 11 to 1, as per 'barbero' viscoelastic model
],
},
},
'fsm_strip_thickness_damage_analysis': {
'program_args_fmt': "%(results_file)s --report_file %(report_file)s",
'report_file_ext': 'pdf',
'variations': {
'barbero/*.hdf5': [
{'a': 2310, 'add-automatic-markers': '',},
],
'barbero_mode-transitions/barbero-viscoelastic_mode-2-to-3.hdf5': [
{'a': a, 'add-automatic-markers': '',}
for a in linspace_with_step(195.0, 198.0, 0.5)
],
'barbero_mode-transitions/barbero-viscoelastic_mode-10-to-11.hdf5': [
{'a': a, 'add-automatic-markers': '',}
for a in linspace_with_step(835.0, 846.0, 0.5)
],
},
},
}
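# Illustrative note (not part of the original source): a variation dict such as
# {'a': 2310, 'add-automatic-markers': ''} is expanded by
# run_single_analysis_type() into the CLI flags "--a 2310 --add-automatic-markers"
# and into the report filename suffix "@a=2310,add-automatic-markers=".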
def find_filenames(top_dir, filename_pattern):
for path, _, file_list in os.walk(top_dir):
for name in fnmatch.filter(file_list, filename_pattern):
yield os.path.join(path, name)
def is_experiment_computed(results_file):
try:
return tb.is_hdf5_file(results_file)
except (IOError, tb.HDF5ExtError):
return False
@task
def compute_experiment(data_file, force=False):
base_name = os.path.splitext(data_file)[0]
log_file = base_name + '.log'
results_file = base_name + '.hdf5'
if force or not is_experiment_computed(results_file):
local(
"fsm_eigenvalue %s --results-file %s |& tee %s" % (data_file, results_file, log_file),
shell='bash',
)
else:
print "Experiment '%s' has already been computed, skipping." % data_file
@task
def compute_all_experiments(top_dir='.', force=False):
for data_file in find_filenames(top_dir, '*.yaml'):
compute_experiment(data_file, force=force)
@task
def clean_all_analyses_reports(top_dir='.', force_analysis_type=''):
results_dirs = set(os.path.dirname(results_file) for results_file in find_filenames(top_dir, '*.hdf5'))
for results_dir in results_dirs:
for analysis_type in ANALYSES_TYPES:
reports_dir = os.path.join(results_dir, analysis_type)
if os.path.exists(reports_dir):
print "Deleting the '%s' analyses reports directory..." % reports_dir
shutil.rmtree(reports_dir)
@task
def run_single_analysis_type(results_file, analysis_type):
reports_dir = os.path.join(os.path.dirname(results_file), analysis_type)
reports_base_name = os.path.join(reports_dir, os.path.splitext(os.path.basename(results_file))[0])
if not os.path.exists(reports_dir):
os.mkdir(reports_dir)
analysis_settings = ANALYSES_TYPES[analysis_type]
for path_pattern, variation_group in analysis_settings['variations'].items():
if not fnmatch.fnmatch(results_file, '*/' + path_pattern):
continue
for variation_dict in variation_group:
sorted_variation_items = sorted(variation_dict.items())
variation_filename_part = ','.join("%s=%s" % (k, v) for k, v in sorted_variation_items)
report_file = "%s%s.%s" % (
reports_base_name,
'@' + variation_filename_part if variation_filename_part else '',
analysis_settings['report_file_ext']
)
program_args = analysis_settings['program_args_fmt'] % locals()
variation_program_args = ' '.join("--%s %s" % (k, v) for k, v in sorted_variation_items)
local("%s %s %s" % (analysis_type, program_args, variation_program_args))
@task
def run_analyses(results_file, force_analysis_type=''):
analyses_types = ANALYSES_TYPES if not force_analysis_type else (force_analysis_type,)
for analysis_type in analyses_types:
run_single_analysis_type(results_file, analysis_type)
@task
def run_analyses_on_all_experiments(top_dir='.', force_analysis_type=''):
for results_file in find_filenames(top_dir, '*.hdf5'):
run_analyses(results_file, force_analysis_type=force_analysis_type)
cda07cff0ebf6a216bc25000a7bcd2cf2d5a3295 | 394 | py | Python | testapp/wagtail_wordpress_importer/migrations/0071_remove_customfieldslayoutsubfield_custom_field.py | nickmoreton/wagtail_wordpress_importer | fbe6b60ae624edac3f42a62ce30af4a0c548b4ed | ["MIT"]
# Generated by Django 3.1.4 on 2021-01-11 15:28
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('wagtail_wordpress_importer', '0070_customfieldslayoutsubfield_custom_field'),
]
operations = [
migrations.RemoveField(
model_name='customfieldslayoutsubfield',
name='custom_field',
),
]
6798e3b8b9d54255157777fb40998494c62af670 | 1,614 | py | Python | scrape_extensions.py | LaudateCorpus1/file-extensions | c7f9fb815579c0d24fbc99c5cfa42d21feb44a53 | ["MIT"] | 15 stars
#! /usr/bin/env python3
'''
Scrapes file extensions for various file types from FileInfo.com.
The file types are defined in the constants module.
'''
import io
import json
from time import sleep
from urllib.request import urlopen
from bs4 import BeautifulSoup
from constants import BASE_URL, FILE_TYPES
EXTENSIONS_DICT = {}
EXTENSIONS_BY_TYPE = {}
def make_soup(url):
html = urlopen(url).read()
return BeautifulSoup(html, 'lxml')
def get_extensions_for(file_type):
    soup = make_soup(BASE_URL + FILE_TYPES[file_type]['url'])
    extension_table = soup.find('tbody')
    EXTENSIONS_BY_TYPE[file_type] = []
    for row in extension_table.find_all('tr'):
        cols = row.find_all('td')
        extension = cols[0].get_text()
        EXTENSIONS_BY_TYPE[file_type].append(extension)
        EXTENSIONS_DICT[extension] = {}
        EXTENSIONS_DICT[extension]['type'] = file_type
        EXTENSIONS_DICT[extension]['description'] = cols[1].get_text()
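# Illustrative note (not part of the original source): after a scrape the two
# module-level dicts look roughly like
#     EXTENSIONS_DICT    = {'.py': {'type': 'Text', 'description': 'Python Script'}}
#     EXTENSIONS_BY_TYPE = {'Text': ['.py', '.txt', ...]}
# (keys depend on the FILE_TYPES constant and on FileInfo.com's tables).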
def get_all_extensions():
    for file_type in FILE_TYPES:
        get_extensions_for(file_type)
        sleep(1)
def write_dict_to_json_file(dictionary, filename):
with io.open(filename, 'w', encoding='utf8') as file:
json_str = json.dumps(dictionary,
ensure_ascii=False,
indent=4,
sort_keys=True,
separators=(',', ': '))
file.write(json_str)
if __name__ == '__main__':
get_all_extensions()
write_dict_to_json_file(EXTENSIONS_DICT, 'extensions.json')
write_dict_to_json_file(EXTENSIONS_BY_TYPE, 'extensions_by_type.json')
3fffa0af97d5037d77bce64c36dacc660fd170cd | 332 | py | Python | scrapybot/items/proxylist.py | roadt/scrapybot | c4588f3d2c354a464f67f7199933775bc7a859e0 | ["MIT"]
from scrapy.item import Item, Field
class Proxy(Item):
''' info of proxy server'''
ip = Field()
port = Field()
typ = Field()
anonymity = Field()
country = Field()
region = Field()
city = Field()
uptime = Field()
response = Field()
transfer = Field()
key = Field()
9002e4cef505e332ade64a1184afc0c68664ed40 | 2,494 | py | Python | yt_dlp/extractor/cultureunplugged.py | mrBliss/yt-dlp | aecd021656b672dbb617e5bae54a8986f9c4ebaf | ["Unlicense"] | 80 stars
from __future__ import unicode_literals
import time
from .common import InfoExtractor
from ..utils import (
int_or_none,
HEADRequest,
)
class CultureUnpluggedIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?cultureunplugged\.com/documentary/watch-online/play/(?P<id>\d+)(?:/(?P<display_id>[^/]+))?'
_TESTS = [{
'url': 'http://www.cultureunplugged.com/documentary/watch-online/play/53662/The-Next--Best-West',
'md5': 'ac6c093b089f7d05e79934dcb3d228fc',
'info_dict': {
'id': '53662',
'display_id': 'The-Next--Best-West',
'ext': 'mp4',
'title': 'The Next, Best West',
'description': 'md5:0423cd00833dea1519cf014e9d0903b1',
'thumbnail': r're:^https?://.*\.jpg$',
'creator': 'Coldstream Creative',
'duration': 2203,
'view_count': int,
}
}, {
'url': 'http://www.cultureunplugged.com/documentary/watch-online/play/53662',
'only_matching': True,
}]
def _real_extract(self, url):
mobj = self._match_valid_url(url)
video_id = mobj.group('id')
display_id = mobj.group('display_id') or video_id
        # request setClientTimezone.php to get the PHPSESSID cookie, which is needed to get valid JSON data in the next request
self._request_webpage(HEADRequest(
'http://www.cultureunplugged.com/setClientTimezone.php?timeOffset=%d' % -(time.timezone / 3600)), display_id)
movie_data = self._download_json(
'http://www.cultureunplugged.com/movie-data/cu-%s.json' % video_id, display_id)
video_url = movie_data['url']
title = movie_data['title']
description = movie_data.get('synopsis')
creator = movie_data.get('producer')
duration = int_or_none(movie_data.get('duration'))
view_count = int_or_none(movie_data.get('views'))
thumbnails = [{
'url': movie_data['%s_thumb' % size],
'id': size,
'preference': preference,
} for preference, size in enumerate((
'small', 'large')) if movie_data.get('%s_thumb' % size)]
return {
'id': video_id,
'display_id': display_id,
'url': video_url,
'title': title,
'description': description,
'creator': creator,
'duration': duration,
'view_count': view_count,
'thumbnails': thumbnails,
}
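# Illustrative note (not part of the original source): the thumbnails list above
# keeps only the sizes actually present in the JSON, so movie_data containing
# both 'small_thumb' and 'large_thumb' yields two entries with preferences 0 and 1.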
05c54c3c5af3a72b2a383978cf4c6d09f01e4a66 | 282 | py | Python | custom_components/senz/const.py | astrandb/senz_hass | 6725d37fd9c6d250ac10a16e68c56908bf1c8404 | ["MIT"] | 2 stars
"""Constants for the SENZ WiFi integration."""
DOMAIN = "senz"
VERSION = "0.0.6"
SENZ_API = "https://api.senzthermostat.nvent.com/api/v1"
OAUTH2_AUTHORIZE = "https://id.senzthermostat.nvent.com/connect/authorize"
OAUTH2_TOKEN = "https://id.senzthermostat.nvent.com/connect/token"
eafa146f5dfa1a167fbebef9de65fa30ecd042e0 | 2,842 | py | Python | rllib/agents/a3c/a3c.py | firebolt55439/ray | 215300b070628c06f0106906fc6c03bd70ebf140 | ["Apache-2.0"]
import logging
from ray.rllib.agents.a3c.a3c_tf_policy import A3CTFPolicy
from ray.rllib.agents.trainer import with_common_config
from ray.rllib.agents.trainer_template import build_trainer
from ray.rllib.execution.rollout_ops import AsyncGradients
from ray.rllib.execution.train_ops import ApplyGradients
from ray.rllib.execution.metric_ops import StandardMetricsReporting
logger = logging.getLogger(__name__)
# yapf: disable
# __sphinx_doc_begin__
DEFAULT_CONFIG = with_common_config({
    # Whether to use a critic as a baseline (required for using GAE).
"use_critic": True,
# If true, use the Generalized Advantage Estimator (GAE)
# with a value function, see https://arxiv.org/pdf/1506.02438.pdf.
"use_gae": True,
# Size of rollout batch
"rollout_fragment_length": 10,
# GAE(gamma) parameter
"lambda": 1.0,
# Max global norm for each gradient calculated by worker
"grad_clip": 40.0,
# Learning rate
"lr": 0.0001,
# Learning rate schedule
"lr_schedule": None,
# Value Function Loss coefficient
"vf_loss_coeff": 0.5,
# Entropy coefficient
"entropy_coeff": 0.01,
# Min time per iteration
"min_iter_time_s": 5,
# Workers sample async. Note that this increases the effective
# rollout_fragment_length by up to 5x due to async buffering of batches.
"sample_async": True,
# Use the new "trajectory view API" to collect samples and produce
# model- and policy inputs.
"_use_trajectory_view_api": True,
})
# __sphinx_doc_end__
# yapf: enable
def get_policy_class(config):
if config["framework"] == "torch":
from ray.rllib.agents.a3c.a3c_torch_policy import \
A3CTorchPolicy
return A3CTorchPolicy
else:
return A3CTFPolicy
def validate_config(config):
if config["entropy_coeff"] < 0:
raise DeprecationWarning("`entropy_coeff` must be >= 0")
if config["sample_async"] and config["framework"] == "torch":
config["sample_async"] = False
logger.warning("`sample_async=True` is not supported for PyTorch! "
"Multithreading can lead to crashes.")
def execution_plan(workers, config):
# For A3C, compute policy gradients remotely on the rollout workers.
grads = AsyncGradients(workers)
# Apply the gradients as they arrive. We set update_all to False so that
# only the worker sending the gradient is updated with new weights.
train_op = grads.for_each(ApplyGradients(workers, update_all=False))
return StandardMetricsReporting(train_op, workers, config)
A3CTrainer = build_trainer(
name="A3C",
default_config=DEFAULT_CONFIG,
default_policy=A3CTFPolicy,
get_policy_class=get_policy_class,
validate_config=validate_config,
execution_plan=execution_plan)
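# Illustrative usage sketch (not part of the original source), assuming the
# historical ray Trainer API of this snapshot:
#     trainer = A3CTrainer(config={"num_workers": 2}, env="CartPole-v0")
#     result = trainer.train()  # returns a metrics dict for one iteration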
13b10d26ffa93906cc33cfb44730f1718d345af4 | 756 | py | Python | pages/result_sk.py | sunjeet-khokhar/tau-playwright-workshop | b2133164f8432e8b2564a7bfa91e4be9a9db62ca | ["MIT"] | 2 stars
"""
This module contains DuckDuckGoResultPage,
the page object for the DuckDuckGo result page.
"""
from playwright.sync_api import Page
class ResultsPage:
    SEARCH_FIELD = "[id='search_form_input']"
    RESULT_TITLES = "[data-testid=result-title-a]"
    def __init__(self, page: Page):
        self.page = page
    def get_search_field_value(self):
        return self.page.input_value(self.SEARCH_FIELD)
    def wait_for_nth_result_to_load(self, num):
        # str() guards against an integer index being passed in
        self.page.locator(self.RESULT_TITLES + " >> nth=" + str(num)).wait_for()
    def get_inner_text_of_all_results(self):
        return self.page.locator(self.RESULT_TITLES).all_inner_texts()
    def get_title_of_page(self):
        return self.page.title()
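# Illustrative usage sketch (not part of the original source), assuming a
# Playwright ``page`` on which a search has already been performed:
#     results = ResultsPage(page)
#     results.wait_for_nth_result_to_load(5)
#     titles = results.get_inner_text_of_all_results()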
0156e438e9e24820943c9e48b04565710ea2fd4b | 1,632 | py | Python | ppocr/postprocess/__init__.py | Bourne-M/PaddleOCR | 865e737413d430798b8c17525dcc22db4d106752 | ["Apache-2.0"] | 506 stars
# copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import copy
__all__ = ['build_post_process']
def build_post_process(config, global_config=None):
from .db_postprocess import DBPostProcess
from .east_postprocess import EASTPostProcess
from .sast_postprocess import SASTPostProcess
from .rec_postprocess import CTCLabelDecode, AttnLabelDecode, SRNLabelDecode
from .cls_postprocess import ClsPostProcess
support_dict = [
'DBPostProcess', 'EASTPostProcess', 'SASTPostProcess', 'CTCLabelDecode',
'AttnLabelDecode', 'ClsPostProcess', 'SRNLabelDecode'
]
config = copy.deepcopy(config)
module_name = config.pop('name')
if global_config is not None:
config.update(global_config)
    assert module_name in support_dict, \
        'post process only supports {}'.format(support_dict)
module_class = eval(module_name)(**config)
return module_class
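# Illustrative usage sketch (not part of the original source); the 'name' key
# selects the class and the remaining keys become its constructor kwargs:
#     post_process = build_post_process({'name': 'DBPostProcess', 'thresh': 0.3})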
d09569eb7509b6ba89eb69c50cb0f8eac9a0e9ff | 2,600 | py | Python | httprunner/locusts.py | QiChangYin/MultipleInterfaceManager | 0732cbd2dc9065aa4947ab3243136450874579a4 | ["MIT"]
# encoding: utf-8
import io
import multiprocessing
import os
import sys
from httprunner.logger import color_print
from httprunner.testcase import TestcaseLoader
from locust.main import main
def parse_locustfile(file_path):
""" parse testcase file and return locustfile path.
if file_path is a Python file, assume it is a locustfile
if file_path is a YAML/JSON file, convert it to locustfile
"""
if not os.path.isfile(file_path):
color_print("file path invalid, exit.", "RED")
sys.exit(1)
file_suffix = os.path.splitext(file_path)[1]
if file_suffix == ".py":
locustfile_path = file_path
elif file_suffix in ['.yaml', '.yml', '.json']:
locustfile_path = gen_locustfile(file_path)
else:
# '' or other suffix
color_print("file type should be YAML/JSON/Python, exit.", "RED")
sys.exit(1)
return locustfile_path
def gen_locustfile(testcase_file_path):
""" generate locustfile from template.
"""
locustfile_path = 'locustfile.py'
template_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"templates",
"locustfile_template"
)
testset = TestcaseLoader.load_test_file(testcase_file_path)
host = testset.get("config", {}).get("request", {}).get("base_url", "")
with io.open(template_path, encoding='utf-8') as template:
with io.open(locustfile_path, 'w', encoding='utf-8') as locustfile:
template_content = template.read()
template_content = template_content.replace("$HOST", host)
template_content = template_content.replace("$TESTCASE_FILE", testcase_file_path)
locustfile.write(template_content)
return locustfile_path
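# Illustrative note (not part of the original source): a template line such as
#     host = "$HOST"
# becomes, for a testset whose base_url is http://example.com,
#     host = "http://example.com"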
def start_master(sys_argv):
sys_argv.append("--master")
sys.argv = sys_argv
main()
def start_slave(sys_argv):
if "--slave" not in sys_argv:
sys_argv.extend(["--slave"])
sys.argv = sys_argv
main()
def run_locusts_with_processes(sys_argv, processes_count):
processes = []
manager = multiprocessing.Manager()
for _ in range(processes_count):
p_slave = multiprocessing.Process(target=start_slave, args=(sys_argv,))
p_slave.daemon = True
p_slave.start()
processes.append(p_slave)
try:
if "--slave" in sys_argv:
            for process in processes:
                process.join()
else:
start_master(sys_argv)
except KeyboardInterrupt:
manager.shutdown()
0fe775cf66e2cc0b7685725685c992dbbb84d903 | 26,468 | py | Python | tensorflow/contrib/keras/python/keras/engine/topology_test.py | ralic/tensorflow | 1209491913def44650d6457c60a6e41d56de3306 | ["Apache-2.0"] | 1 star
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for layer graphs construction & handling."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
import numpy as np
from tensorflow.contrib.keras.python import keras
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
try:
import yaml # pylint:disable=g-import-not-at-top
except ImportError:
yaml = None
try:
import h5py # pylint:disable=g-import-not-at-top
except ImportError:
h5py = None
class TopologyConstructionTest(test.TestCase):
def test_get_updates_for(self):
a = keras.layers.Input(shape=(2,))
dense_layer = keras.layers.Dense(1)
dense_layer.add_update(0, inputs=a)
dense_layer.add_update(1, inputs=None)
self.assertListEqual(dense_layer.get_updates_for(a), [0])
self.assertListEqual(dense_layer.get_updates_for(None), [1])
def test_get_losses_for(self):
a = keras.layers.Input(shape=(2,))
dense_layer = keras.layers.Dense(1)
dense_layer.add_loss(0, inputs=a)
dense_layer.add_loss(1, inputs=None)
self.assertListEqual(dense_layer.get_losses_for(a), [0])
self.assertListEqual(dense_layer.get_losses_for(None), [1])
def test_trainable_weights(self):
a = keras.layers.Input(shape=(2,))
b = keras.layers.Dense(1)(a)
model = keras.models.Model(a, b)
weights = model.weights
self.assertListEqual(model.trainable_weights, weights)
self.assertListEqual(model.non_trainable_weights, [])
model.trainable = False
self.assertListEqual(model.trainable_weights, [])
self.assertListEqual(model.non_trainable_weights, weights)
model.trainable = True
self.assertListEqual(model.trainable_weights, weights)
self.assertListEqual(model.non_trainable_weights, [])
model.layers[1].trainable = False
self.assertListEqual(model.trainable_weights, [])
self.assertListEqual(model.non_trainable_weights, weights)
# sequential model
model = keras.models.Sequential()
model.add(keras.layers.Dense(1, input_dim=2))
weights = model.weights
self.assertListEqual(model.trainable_weights, weights)
self.assertListEqual(model.non_trainable_weights, [])
model.trainable = False
self.assertListEqual(model.trainable_weights, [])
self.assertListEqual(model.non_trainable_weights, weights)
model.trainable = True
self.assertListEqual(model.trainable_weights, weights)
self.assertListEqual(model.non_trainable_weights, [])
model.layers[0].trainable = False
self.assertListEqual(model.trainable_weights, [])
self.assertListEqual(model.non_trainable_weights, weights)
def test_weight_loading(self):
with self.test_session():
a = keras.layers.Input(shape=(2,))
x = keras.layers.Dense(3)(a)
b = keras.layers.Dense(1)(x)
model = keras.models.Model(a, b)
x = np.random.random((3, 2))
ref_y = model.predict(x)
weights = model.get_weights()
model.set_weights(weights)
y = model.predict(x)
self.assertAllClose(ref_y, y)
with self.assertRaises(ValueError):
model.set_weights(weights[1:])
with self.assertRaises(ValueError):
model.set_weights(weights[::-1])
if h5py is None:
return # Skip rest of test if H5py isn't available.
temp_dir = self.get_temp_dir()
self.addCleanup(shutil.rmtree, temp_dir)
h5_path = os.path.join(temp_dir, 'test.h5')
model.save_weights(h5_path)
model.load_weights(h5_path)
y = model.predict(x)
self.assertAllClose(ref_y, y)
model.load_weights(h5_path, by_name=True)
y = model.predict(x)
self.assertAllClose(ref_y, y)
def test_learning_phase(self):
with self.test_session():
a = keras.layers.Input(shape=(32,), name='input_a')
b = keras.layers.Input(shape=(32,), name='input_b')
a_2 = keras.layers.Dense(16, name='dense_1')(a)
dp = keras.layers.Dropout(0.5, name='dropout')
b_2 = dp(b)
self.assertFalse(a_2._uses_learning_phase)
self.assertTrue(b_2._uses_learning_phase)
# test merge
m = keras.layers.concatenate([a_2, b_2])
self.assertTrue(m._uses_learning_phase)
# Test recursion
model = keras.models.Model([a, b], [a_2, b_2])
self.assertTrue(model.uses_learning_phase)
c = keras.layers.Input(shape=(32,), name='input_c')
d = keras.layers.Input(shape=(32,), name='input_d')
c_2, b_2 = model([c, d])
self.assertTrue(c_2._uses_learning_phase)
self.assertTrue(b_2._uses_learning_phase)
# try actually running graph
fn = keras.backend.function(
model.inputs + [keras.backend.learning_phase()], model.outputs)
input_a_np = np.random.random((10, 32))
input_b_np = np.random.random((10, 32))
fn_outputs_no_dp = fn([input_a_np, input_b_np, 0])
fn_outputs_dp = fn([input_a_np, input_b_np, 1])
# output a: nothing changes
self.assertEqual(fn_outputs_no_dp[0].sum(), fn_outputs_dp[0].sum())
# output b: dropout applied
self.assertNotEqual(fn_outputs_no_dp[1].sum(), fn_outputs_dp[1].sum())
def test_layer_call_arguments(self):
# Test the ability to pass and serialize arguments to `call`.
inp = keras.layers.Input(shape=(2,))
x = keras.layers.Dense(3)(inp)
x = keras.layers.Dropout(0.5)(x, training=True)
model = keras.models.Model(inp, x)
self.assertFalse(model.uses_learning_phase)
# Test that argument is kept when applying the model
inp2 = keras.layers.Input(shape=(2,))
out2 = model(inp2)
self.assertFalse(out2._uses_learning_phase)
# Test that argument is kept after loading a model
config = model.get_config()
model = keras.models.Model.from_config(config)
self.assertFalse(model.uses_learning_phase)
def test_node_construction(self):
# test basics
a = keras.layers.Input(shape=(32,), name='input_a')
b = keras.layers.Input(shape=(32,), name='input_b')
with self.assertRaises(ValueError):
_ = keras.layers.Input(shape=(32,), batch_shape=(10, 32))
with self.assertRaises(ValueError):
_ = keras.layers.Input(shape=(32,), unknwon_kwarg=None)
self.assertListEqual(a.get_shape().as_list(), [None, 32])
a_layer, a_node_index, a_tensor_index = a._keras_history
b_layer, _, _ = b._keras_history
self.assertEqual(len(a_layer.inbound_nodes), 1)
self.assertEqual(a_tensor_index, 0)
node = a_layer.inbound_nodes[a_node_index]
self.assertEqual(node.outbound_layer, a_layer)
self.assertListEqual(node.inbound_layers, [])
self.assertListEqual(node.input_tensors, [a])
self.assertListEqual(node.input_shapes, [(None, 32)])
self.assertListEqual(node.output_tensors, [a])
self.assertListEqual(node.output_shapes, [(None, 32)])
dense = keras.layers.Dense(16, name='dense_1')
a_2 = dense(a)
b_2 = dense(b)
self.assertEqual(len(dense.inbound_nodes), 2)
self.assertEqual(len(dense.outbound_nodes), 0)
self.assertListEqual(dense.inbound_nodes[0].inbound_layers, [a_layer])
self.assertEqual(dense.inbound_nodes[0].outbound_layer, dense)
self.assertListEqual(dense.inbound_nodes[1].inbound_layers, [b_layer])
self.assertEqual(dense.inbound_nodes[1].outbound_layer, dense)
self.assertListEqual(dense.inbound_nodes[0].input_tensors, [a])
self.assertListEqual(dense.inbound_nodes[1].input_tensors, [b])
# test layer properties
test_layer = keras.layers.Dense(16, name='test_layer')
a_test = test_layer(a)
self.assertListEqual(test_layer.kernel.get_shape().as_list(), [32, 16])
self.assertEqual(test_layer.input, a)
self.assertEqual(test_layer.output, a_test)
self.assertEqual(test_layer.input_shape, (None, 32))
self.assertEqual(test_layer.output_shape, (None, 16))
self.assertEqual(dense.get_input_at(0), a)
self.assertEqual(dense.get_input_at(1), b)
self.assertEqual(dense.get_output_at(0), a_2)
self.assertEqual(dense.get_output_at(1), b_2)
self.assertEqual(dense.get_input_shape_at(0), (None, 32))
self.assertEqual(dense.get_input_shape_at(1), (None, 32))
self.assertEqual(dense.get_output_shape_at(0), (None, 16))
self.assertEqual(dense.get_output_shape_at(1), (None, 16))
self.assertEqual(dense.get_input_mask_at(0), None)
self.assertEqual(dense.get_input_mask_at(1), None)
self.assertEqual(dense.get_output_mask_at(0), None)
self.assertEqual(dense.get_output_mask_at(1), None)
def test_multi_input_layer(self):
with self.test_session():
# test multi-input layer
a = keras.layers.Input(shape=(32,), name='input_a')
b = keras.layers.Input(shape=(32,), name='input_b')
dense = keras.layers.Dense(16, name='dense_1')
a_2 = dense(a)
b_2 = dense(b)
merged = keras.layers.concatenate([a_2, b_2], name='merge')
self.assertListEqual(merged.get_shape().as_list(), [None, 16 * 2])
merge_layer, merge_node_index, merge_tensor_index = merged._keras_history
self.assertEqual(merge_node_index, 0)
self.assertEqual(merge_tensor_index, 0)
self.assertEqual(len(merge_layer.inbound_nodes), 1)
self.assertEqual(len(merge_layer.outbound_nodes), 0)
self.assertEqual(len(merge_layer.inbound_nodes[0].input_tensors), 2)
self.assertEqual(len(merge_layer.inbound_nodes[0].inbound_layers), 2)
c = keras.layers.Dense(64, name='dense_2')(merged)
d = keras.layers.Dense(5, name='dense_3')(c)
model = keras.models.Model(inputs=[a, b], outputs=[c, d], name='model')
self.assertEqual(len(model.layers), 6)
output_shapes = model._compute_output_shape([(None, 32), (None, 32)])
self.assertListEqual(output_shapes[0].as_list(), [None, 64])
self.assertListEqual(output_shapes[1].as_list(), [None, 5])
self.assertListEqual(
model.compute_mask([a, b], [None, None]), [None, None])
# we don't check names of first 2 layers (inputs) because
# ordering of same-level layers is not fixed
self.assertListEqual([l.name for l in model.layers][2:],
['dense_1', 'merge', 'dense_2', 'dense_3'])
self.assertListEqual([l.name for l in model._input_layers],
['input_a', 'input_b'])
self.assertListEqual([l.name for l in model._output_layers],
['dense_2', 'dense_3'])
# actually run model
fn = keras.backend.function(model.inputs, model.outputs)
input_a_np = np.random.random((10, 32))
input_b_np = np.random.random((10, 32))
fn_outputs = fn([input_a_np, input_b_np])
self.assertListEqual([x.shape for x in fn_outputs], [(10, 64), (10, 5)])
# test get_source_inputs
self.assertListEqual(keras.engine.topology.get_source_inputs(c), [a, b])
# serialization / deserialization
json_config = model.to_json()
recreated_model = keras.models.model_from_json(json_config)
recreated_model.compile('rmsprop', 'mse')
self.assertListEqual([l.name for l in recreated_model.layers][2:],
['dense_1', 'merge', 'dense_2', 'dense_3'])
self.assertListEqual([l.name for l in recreated_model._input_layers],
['input_a', 'input_b'])
self.assertListEqual([l.name for l in recreated_model._output_layers],
['dense_2', 'dense_3'])
fn = keras.backend.function(recreated_model.inputs,
recreated_model.outputs)
input_a_np = np.random.random((10, 32))
input_b_np = np.random.random((10, 32))
fn_outputs = fn([input_a_np, input_b_np])
self.assertListEqual([x.shape for x in fn_outputs], [(10, 64), (10, 5)])
def test_recursion(self):
with self.test_session():
a = keras.layers.Input(shape=(32,), name='input_a')
b = keras.layers.Input(shape=(32,), name='input_b')
dense = keras.layers.Dense(16, name='dense_1')
a_2 = dense(a)
b_2 = dense(b)
merged = keras.layers.concatenate([a_2, b_2], name='merge')
c = keras.layers.Dense(64, name='dense_2')(merged)
d = keras.layers.Dense(5, name='dense_3')(c)
model = keras.models.Model(inputs=[a, b], outputs=[c, d], name='model')
e = keras.layers.Input(shape=(32,), name='input_e')
f = keras.layers.Input(shape=(32,), name='input_f')
g, h = model([e, f])
self.assertListEqual(g.get_shape().as_list(), c.get_shape().as_list())
self.assertListEqual(h.get_shape().as_list(), d.get_shape().as_list())
# test separate manipulation of different layer outputs
i = keras.layers.Dense(7, name='dense_4')(h)
final_model = keras.models.Model(
inputs=[e, f], outputs=[i, g], name='final')
self.assertEqual(len(final_model.inputs), 2)
self.assertEqual(len(final_model.outputs), 2)
self.assertEqual(len(final_model.layers), 4)
# we don't check names of first 2 layers (inputs) because
# ordering of same-level layers is not fixed
self.assertListEqual([layer.name for layer in final_model.layers][2:],
['model', 'dense_4'])
self.assertListEqual(
model.compute_mask([e, f], [None, None]), [None, None])
self.assertListEqual(
final_model._compute_output_shape([(10, 32), (10, 32)]), [(10, 7),
(10, 64)])
# run recursive model
fn = keras.backend.function(final_model.inputs, final_model.outputs)
input_a_np = np.random.random((10, 32))
input_b_np = np.random.random((10, 32))
fn_outputs = fn([input_a_np, input_b_np])
self.assertListEqual([x.shape for x in fn_outputs], [(10, 7), (10, 64)])
# test serialization
model_config = final_model.get_config()
recreated_model = keras.models.Model.from_config(model_config)
fn = keras.backend.function(recreated_model.inputs,
recreated_model.outputs)
input_a_np = np.random.random((10, 32))
input_b_np = np.random.random((10, 32))
fn_outputs = fn([input_a_np, input_b_np])
self.assertListEqual([x.shape for x in fn_outputs], [(10, 7), (10, 64)])
def test_multi_input_multi_output_recursion(self):
with self.test_session():
# test multi-input multi-output
a = keras.layers.Input(shape=(32,), name='input_a')
b = keras.layers.Input(shape=(32,), name='input_b')
dense = keras.layers.Dense(16, name='dense_1')
a_2 = dense(a)
b_2 = dense(b)
merged = keras.layers.concatenate([a_2, b_2], name='merge')
c = keras.layers.Dense(64, name='dense_2')(merged)
d = keras.layers.Dense(5, name='dense_3')(c)
model = keras.models.Model(inputs=[a, b], outputs=[c, d], name='model')
j = keras.layers.Input(shape=(32,), name='input_j')
k = keras.layers.Input(shape=(32,), name='input_k')
_, n = model([j, k])
o = keras.layers.Input(shape=(32,), name='input_o')
p = keras.layers.Input(shape=(32,), name='input_p')
q, _ = model([o, p])
self.assertListEqual(n.get_shape().as_list(), [None, 5])
self.assertListEqual(q.get_shape().as_list(), [None, 64])
s = keras.layers.concatenate([n, q], name='merge_nq')
self.assertListEqual(s.get_shape().as_list(), [None, 64 + 5])
# test with single output as 1-elem list
multi_io_model = keras.models.Model([j, k, o, p], [s])
fn = keras.backend.function(multi_io_model.inputs, multi_io_model.outputs)
fn_outputs = fn([
np.random.random((10, 32)), np.random.random((10, 32)),
np.random.random((10, 32)), np.random.random((10, 32))
])
self.assertListEqual([x.shape for x in fn_outputs], [(10, 69)])
# test with single output as tensor
multi_io_model = keras.models.Model([j, k, o, p], s)
fn = keras.backend.function(multi_io_model.inputs, multi_io_model.outputs)
fn_outputs = fn([
np.random.random((10, 32)), np.random.random((10, 32)),
np.random.random((10, 32)), np.random.random((10, 32))
])
# note that the output of the function will still be a 1-elem list
self.assertListEqual([x.shape for x in fn_outputs], [(10, 69)])
# test serialization
model_config = multi_io_model.get_config()
recreated_model = keras.models.Model.from_config(model_config)
fn = keras.backend.function(recreated_model.inputs,
recreated_model.outputs)
fn_outputs = fn([
np.random.random((10, 32)), np.random.random((10, 32)),
np.random.random((10, 32)), np.random.random((10, 32))
])
# note that the output of the function will still be a 1-elem list
self.assertListEqual([x.shape for x in fn_outputs], [(10, 69)])
config = model.get_config()
keras.models.Model.from_config(config)
model.summary()
json_str = model.to_json()
keras.models.model_from_json(json_str)
if yaml is not None:
yaml_str = model.to_yaml()
keras.models.model_from_yaml(yaml_str)
def test_invalid_graphs(self):
a = keras.layers.Input(shape=(32,), name='input_a')
b = keras.layers.Input(shape=(32,), name='input_b')
dense = keras.layers.Dense(16, name='dense_1')
a_2 = dense(a)
b_2 = dense(b)
merged = keras.layers.concatenate([a_2, b_2], name='merge')
c = keras.layers.Dense(64, name='dense_2')(merged)
d = keras.layers.Dense(5, name='dense_3')(c)
model = keras.models.Model(inputs=[a, b], outputs=[c, d], name='model')
# input is not an Input tensor
j = keras.layers.Input(shape=(32,), name='input_j')
j = keras.layers.Dense(32)(j)
k = keras.layers.Input(shape=(32,), name='input_k')
m, n = model([j, k])
with self.assertRaises(Exception):
keras.models.Model([j, k], [m, n])
# disconnected graph
j = keras.layers.Input(shape=(32,), name='input_j')
k = keras.layers.Input(shape=(32,), name='input_k')
m, n = model([j, k])
with self.assertRaises(Exception):
keras.models.Model([j], [m, n])
# redundant outputs
j = keras.layers.Input(shape=(32,), name='input_j')
k = keras.layers.Input(shape=(32,), name='input_k')
m, n = model([j, k])
keras.models.Model([j, k], [m, n, n])
# redundant inputs
j = keras.layers.Input(shape=(32,), name='input_j')
k = keras.layers.Input(shape=(32,), name='input_k')
m, n = model([j, k])
with self.assertRaises(Exception):
keras.models.Model([j, k, j], [m, n])
    # I have no idea what I'm doing: garbage as inputs/outputs
j = keras.layers.Input(shape=(32,), name='input_j')
k = keras.layers.Input(shape=(32,), name='input_k')
m, n = model([j, k])
with self.assertRaises(Exception):
keras.models.Model([j, k], [m, n, 0])
def test_raw_tf_compatibility(self):
# test calling layers/models on TF tensors
a = keras.layers.Input(shape=(32,), name='input_a')
b = keras.layers.Input(shape=(32,), name='input_b')
dense = keras.layers.Dense(16, name='dense_1')
a_2 = dense(a)
b_2 = dense(b)
merged = keras.layers.concatenate([a_2, b_2], name='merge')
c = keras.layers.Dense(64, name='dense_2')(merged)
d = keras.layers.Dense(5, name='dense_3')(c)
model = keras.models.Model(inputs=[a, b], outputs=[c, d], name='model')
j = keras.layers.Input(shape=(32,), name='input_j')
k = keras.layers.Input(shape=(32,), name='input_k')
m, n = model([j, k])
tf_model = keras.models.Model([j, k], [m, n])
j_tf = array_ops.placeholder(dtype=dtypes.float32, shape=(None, 32))
k_tf = array_ops.placeholder(dtype=dtypes.float32, shape=(None, 32))
m_tf, n_tf = tf_model([j_tf, k_tf])
self.assertListEqual(m_tf.get_shape().as_list(), [None, 64])
self.assertListEqual(n_tf.get_shape().as_list(), [None, 5])
# test merge
keras.layers.concatenate([j_tf, k_tf], axis=1)
keras.layers.add([j_tf, k_tf])
# test tensor input
x = array_ops.placeholder(shape=(None, 2), dtype=dtypes.float32)
keras.layers.InputLayer(input_tensor=x)
x = keras.layers.Input(tensor=x)
keras.layers.Dense(2)(x)
def test_basic_masking(self):
a = keras.layers.Input(shape=(10, 32), name='input_a')
b = keras.layers.Masking()(a)
model = keras.models.Model(a, b)
self.assertEqual(model.output_mask.get_shape().as_list(), [None, 10])
def test_weight_preprocessing(self):
input_dim = 3
output_dim = 3
size = 2
cases = [
[
(keras.layers.Bidirectional(keras.layers.SimpleRNN(2))),
[np.random.random((2, 1)), np.random.random((2, 1))],
(None, 3, 2),
],
[
(keras.layers.TimeDistributed(keras.layers.Dense(1))),
[np.random.random((2, 1)), np.random.random((1,))],
(None, 3, 2),
],
[
(keras.layers.Conv1D(output_dim, size, use_bias=False)),
[np.random.random((output_dim, input_dim, size, 1))],
(None, 4, input_dim),
],
[
(keras.layers.Conv2D(output_dim, size,
use_bias=False, data_format='channels_first')),
[np.random.random((output_dim, input_dim, size, size))],
(None, input_dim, 4, 4),
],
[
(keras.layers.Conv2DTranspose(output_dim, size,
use_bias=False,
data_format='channels_first')),
[np.random.random((output_dim, input_dim, size, size))],
(None, input_dim, 4, 4),
],
[
(keras.layers.Conv2DTranspose(output_dim, size,
use_bias=False,
data_format='channels_last')),
[np.random.random((size, size, input_dim, output_dim))],
(None, 4, 4, input_dim),
],
[
(keras.layers.Conv3D(output_dim, size,
use_bias=False, data_format='channels_first')),
[np.random.random((output_dim, input_dim, size, size, size))],
(None, input_dim, 4, 4, 4),
],
[
(keras.layers.GRU(output_dim)),
[np.random.random((input_dim, output_dim)),
np.random.random((output_dim, output_dim)),
np.random.random((output_dim,)),
np.random.random((input_dim, output_dim)),
np.random.random((output_dim, output_dim)),
np.random.random((output_dim,)),
np.random.random((input_dim, output_dim)),
np.random.random((output_dim, output_dim)),
np.random.random((output_dim,))],
(None, 4, input_dim),
],
[
(keras.layers.LSTM(output_dim)),
[np.random.random((input_dim, output_dim)),
np.random.random((output_dim, output_dim)),
np.random.random((output_dim,)),
np.random.random((input_dim, output_dim)),
np.random.random((output_dim, output_dim)),
np.random.random((output_dim,)),
np.random.random((input_dim, output_dim)),
np.random.random((output_dim, output_dim)),
np.random.random((output_dim,)),
np.random.random((input_dim, output_dim)),
np.random.random((output_dim, output_dim)),
np.random.random((output_dim,))],
(None, 4, input_dim),
],
]
for layer, weights, input_shape in cases:
layer.build(input_shape)
_ = keras.engine.topology.preprocess_weights_for_loading(
layer, weights, original_keras_version='1')
model = keras.models.Sequential([keras.layers.Dense(2, input_dim=2)])
_ = keras.engine.topology.preprocess_weights_for_loading(
model, model.weights, original_keras_version='1')
x = keras.Input((2,))
y = keras.layers.Dense(2)(x)
model = keras.models.Model(x, y)
_ = keras.engine.topology.preprocess_weights_for_loading(
model, model.weights, original_keras_version='1')
def test_layer_sharing_at_heterogenous_depth(self):
with self.test_session():
x_val = np.random.random((10, 5))
x = keras.Input(shape=(5,))
a = keras.layers.Dense(5, name='A')
b = keras.layers.Dense(5, name='B')
output = a(b(a(b(x))))
m = keras.models.Model(x, output)
output_val = m.predict(x_val)
config = m.get_config()
weights = m.get_weights()
m2 = keras.models.Model.from_config(config)
m2.set_weights(weights)
output_val_2 = m2.predict(x_val)
self.assertAllClose(output_val, output_val_2, atol=1e-6)
def test_layer_sharing_at_heterogenous_depth_with_concat(self):
with self.test_session():
input_shape = (16, 9, 3)
input_layer = keras.Input(shape=input_shape)
a = keras.layers.Dense(3, name='dense_A')
b = keras.layers.Dense(3, name='dense_B')
c = keras.layers.Dense(3, name='dense_C')
x1 = b(a(input_layer))
x2 = a(c(input_layer))
output = keras.layers.concatenate([x1, x2])
m = keras.models.Model(inputs=input_layer, outputs=output)
x_val = np.random.random((10, 16, 9, 3))
output_val = m.predict(x_val)
config = m.get_config()
weights = m.get_weights()
m2 = keras.models.Model.from_config(config)
m2.set_weights(weights)
output_val_2 = m2.predict(x_val)
self.assertAllClose(output_val, output_val_2, atol=1e-6)
if __name__ == '__main__':
test.main()
| 38.35942
| 80
| 0.639376
|
d7e66dda47f3797b486a992ceaebb95da37bf0e4
| 2,943
|
py
|
Python
|
bear_hug/atlas_viewer.py
|
PaulDJoachim/bear_hug
|
9d048052563974456bfd052aa7774da57678e1a3
|
[
"MIT"
] | 17
|
2018-02-27T07:03:03.000Z
|
2021-08-20T05:37:40.000Z
|
bear_hug/atlas_viewer.py
|
PaulDJoachim/bear_hug
|
9d048052563974456bfd052aa7774da57678e1a3
|
[
"MIT"
] | null | null | null |
bear_hug/atlas_viewer.py
|
PaulDJoachim/bear_hug
|
9d048052563974456bfd052aa7774da57678e1a3
|
[
"MIT"
] | 2
|
2019-10-10T16:53:12.000Z
|
2021-06-10T05:14:18.000Z
|
#! /usr/bin/env python3.6
from bear_hug import BearTerminal, BearLoop
from bear_utilities import copy_shape
from event import BearEventDispatcher
from widgets import InputScrollable, ClosingListener, Layout,\
Label, Widget, FPSCounter
from resources import XpLoader, Atlas
import os
class ElementBox(Layout):
"""
A box for a given widget.
Consists of widget itself, two lines of empty space around it and a '#'-box
around *that*. The upper border of the box also includes a title.
"""
def __init__(self, widget, name='Widget', color='#ff999999'):
if widget.width + 4 >= len(name) + 1:
box = self.generate_box(widget.width+2, widget.height+2,
color)
else:
box = self.generate_box(len(name), widget.height+2, color)
super().__init__(*box)
self.add_child(widget, pos=(2, 2))
self.add_child(Label(name, color='green'), pos=(1, 0))
self._rebuild_self()
@staticmethod
def generate_box(width, height, color):
"""
Return a #-bound box of a given (internal) size
:param width:
:param height:
:return:
"""
chars = []
chars.append(['#' for x in range(width+2)])
for y in range(height):
chars.append(['#'] + [' ' for x in range(width)] + ['#'])
chars.append(['#' for x in range(width+2)])
colors = copy_shape(chars, color)
return chars, colors
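# Illustrative check (not part of the original file): ElementBox.generate_box(2, 1, 'red')
# returns the 4x3 char grid '####' / '#  #' / '####' and a same-shaped grid of 'red'.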
t = BearTerminal(size='46x52', title='Atlas',
filter=['keyboard', 'mouse'])
dispatcher = BearEventDispatcher()
loop = BearLoop(t, dispatcher)
dispatcher.register_listener(ClosingListener(), ['misc_input', 'tick'])
t.start()
atlas = Atlas(XpLoader(os.path.dirname(__file__)+'/demo_assets/test_atlas.xp'),
os.path.dirname(__file__)+'/demo_assets/test_atlas.json')
elements = []
positions = []
names = []
x = 1
y = 1
y_step = 0
for element in sorted(atlas.elements.keys()):
w = ElementBox(Widget(*atlas.get_element(element)), name=element)
elements.append(w)
if x + w.width > 45:
y += y_step
x = 1
y_step = 0
positions.append((x, y))
x += w.width + 1
if w.height + 1 >= y_step:
y_step = w.height + 1
view_height = y+y_step if y+y_step > 50 else 50
chars = [[' ' for _ in range(45)] for _ in range(view_height)]
colors = copy_shape(chars, 'white')
element_view = InputScrollable(chars, colors, view_pos=(0, 0),
view_size=(45, 50), right_bar=True)
for index, widget in enumerate(elements):
element_view.add_child(widget, positions[index])
dispatcher.register_listener(element_view, ['tick', 'key_down', 'service'])
dispatcher.register_listener(element_view.scrollable,
['tick', 'service'])
t.add_widget(element_view, pos=(0, 0))
t.add_widget(FPSCounter(), pos=(0, 51))
loop.run()
| 33.827586
| 79
| 0.615698
|
7a0c50250ac54c78102cb5304e6255e39bd2c12b
| 2,659
|
py
|
Python
|
setup.py
|
pantonante/quantstats
|
6d6cb52b854366bf8ebed7bf821e96f89af0c5ac
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
pantonante/quantstats
|
6d6cb52b854366bf8ebed7bf821e96f89af0c5ac
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
pantonante/quantstats
|
6d6cb52b854366bf8ebed7bf821e96f89af0c5ac
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""QuantStats: Portfolio analytics for quants
https://github.com/ranaroussi/quantstats
QuantStats performs portfolio profiling, to allow quants and
portfolio managers to understand their performance better,
by providing them with in-depth analytics and risk metrics.
"""
from setuptools import setup, find_packages
# from codecs import open
import io
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with io.open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='QuantStats',
version='0.0.24',
description='Portfolio analytics for quants',
long_description=long_description,
url='https://github.com/ranaroussi/quantstats',
author='Ran Aroussi',
author_email='ran@aroussi.com',
license='Apache Software License',
classifiers=[
'License :: OSI Approved :: Apache Software License',
# 'Development Status :: 1 - Planning',
# 'Development Status :: 2 - Pre-Alpha',
# 'Development Status :: 3 - Alpha',
'Development Status :: 4 - Beta',
# 'Development Status :: 5 - Production/Stable',
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'Intended Audience :: Financial and Insurance Industry',
'Intended Audience :: Science/Research',
'Topic :: Office/Business :: Financial',
'Topic :: Office/Business :: Financial :: Investment',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Scientific/Engineering :: Mathematics',
# 'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
platforms=['any'],
keywords="""quant algotrading algorithmic-trading quantitative-trading
quantitative-analysis algo-trading visualization plotting""",
packages=find_packages(exclude=['contrib', 'docs', 'tests', 'examples']),
install_requires=['pandas>=0.24.0', 'numpy>=1.15.0', 'scipy>=1.2.0',
'matplotlib>=3.0.0', 'seaborn>=0.9.0',
'tabulate>=0.8.0', 'yfinance>=0.1.44'],
entry_points={
'console_scripts': [
'sample=sample:main',
],
},
include_package_data=True,
# package_data={
# 'static': 'quantstats/report.html*'
# },
)
| 34.986842
| 77
| 0.635953
|
ceb2f025c797e80a4aaee80959f2ea28af6e45e3
| 1,155
|
py
|
Python
|
tf_rl/env/pybullet/wrapper.py
|
Rowing0914/TF2_RL
|
c1b7f9b376cbecf01deb17f76f8e761035ed336a
|
[
"MIT"
] | 8
|
2020-01-13T03:29:50.000Z
|
2021-11-19T00:59:42.000Z
|
tf_rl/env/pybullet/wrapper.py
|
Rowing0914/TF2_RL
|
c1b7f9b376cbecf01deb17f76f8e761035ed336a
|
[
"MIT"
] | 5
|
2020-11-13T17:40:40.000Z
|
2022-03-12T00:11:33.000Z
|
tf_rl/env/pybullet/wrapper.py
|
Rowing0914/TF2_RL
|
c1b7f9b376cbecf01deb17f76f8e761035ed336a
|
[
"MIT"
] | 1
|
2021-04-02T13:42:39.000Z
|
2021-04-02T13:42:39.000Z
|
import gym
from tf_rl.common.wrappers import WarpFrame, ScaledFloatFrame
from tf_rl.env.pybullet.env_list import ENVS
class PixelObservationWrapper(gym.ObservationWrapper):
""" check this post: https://github.com/openai/gym/pull/740#issuecomment-470382987 """
def __init__(self, env, img_shape=None):
gym.ObservationWrapper.__init__(self, env)
self.img_shape = img_shape
    def observation(self, observation):
        img = self.env.render(mode='rgb_array')
        if self.img_shape is None:
            return img
        import cv2  # ndarrays have no `image_resize`; assume OpenCV for resizing
        return cv2.resize(img, self.img_shape)
def image_wrapper(env, scale=False, grayscale=False):
""" Configure environment for raw image observation in MuJoCo """
env = WarpFrame(env, grayscale=grayscale)
if scale:
env = ScaledFloatFrame(env)
return env
def make_env(env_name="HalfCheetah"):
if env_name.lower() == "cartpole":
from pybullet_envs.bullet.cartpole_bullet import CartPoleBulletEnv
env = CartPoleBulletEnv(renders=False)
else:
env = gym.make(ENVS[env_name.lower()])
env = PixelObservationWrapper(env=env)
env = image_wrapper(env=env)
return env
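# Minimal usage sketch (illustrative, not part of the original file; assumes
# pybullet/pybullet_envs are installed and off-screen rendering is possible):
if __name__ == '__main__':
    env = make_env(env_name="HalfCheetah")
    obs = env.reset()
    print(obs.shape)  # a warped frame; exact size depends on WarpFrame defaults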
| 33.970588
| 90
| 0.716017
|
cf335d47a1f89ffd00a33b7f5520ea00f47dc634
| 790
|
py
|
Python
|
55. Jump Game.py
|
rohitpatwa/leetcode
|
f4826763e8f154cac9134d53b154b8299acd39a8
|
[
"Xnet",
"X11",
"CECILL-B"
] | 1
|
2020-07-15T20:48:27.000Z
|
2020-07-15T20:48:27.000Z
|
55. Jump Game.py
|
rohitpatwa/leetcode
|
f4826763e8f154cac9134d53b154b8299acd39a8
|
[
"Xnet",
"X11",
"CECILL-B"
] | null | null | null |
55. Jump Game.py
|
rohitpatwa/leetcode
|
f4826763e8f154cac9134d53b154b8299acd39a8
|
[
"Xnet",
"X11",
"CECILL-B"
] | null | null | null |
# Keep updating the max reachable index while scanning. If the current index ever exceeds the max reachable index, return False.
class Solution(object):
def canJump(self, nums):
"""
:type nums: List[int]
:rtype: bool
"""
# Reverse array approach
# last_valid_index = -1
# for i in range(len(nums)-1, -1, -1):
# x = nums[i]
# if i+x >= last_valid_index:
# last_valid_index = i
# return (last_valid_index==0)
# Forward array approach
max_reachable = 0
for i,x in enumerate(nums):
if i > max_reachable:
return False
max_reachable = max(max_reachable, i + x)
if max_reachable >= len(nums)-1:
return True
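# Illustrative driver (not part of the original file):
if __name__ == '__main__':
    solver = Solution()
    print(solver.canJump([2, 3, 1, 1, 4]))  # True: 0 -> 1 -> 4
    print(solver.canJump([3, 2, 1, 0, 4]))  # False: stuck at index 3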
| 32.916667
| 113
| 0.535443
|
98ea86bad81d25218f10b7525261d2e27a313af8
| 1,580
|
py
|
Python
|
pi_mqtt_gpio/modules/bme680.py
|
kapratt/pi-mqtt-gpio
|
199473d411381eaa1af9781081814a1130799b1e
|
[
"MIT"
] | null | null | null |
pi_mqtt_gpio/modules/bme680.py
|
kapratt/pi-mqtt-gpio
|
199473d411381eaa1af9781081814a1130799b1e
|
[
"MIT"
] | null | null | null |
pi_mqtt_gpio/modules/bme680.py
|
kapratt/pi-mqtt-gpio
|
199473d411381eaa1af9781081814a1130799b1e
|
[
"MIT"
] | null | null | null |
from pi_mqtt_gpio.modules import GenericSensor
REQUIREMENTS = ("smbus", "bme680")
CONFIG_SCHEMA = {
"i2c_bus_num": dict(type="integer", required=False, empty=False),
"chip_addr": dict(type="integer", required=True, empty=False),
}
SENSOR_SCHEMA = {
"type": dict(
type="string",
required=False,
empty=False,
default="temperature",
allowed=["temperature", "humidity", "pressure"],
)
}
class Sensor(GenericSensor):
"""
Implementation of Sensor class for the BME680 sensor.
"""
def __init__(self, config):
import smbus
import bme680
self.i2c_addr = config["chip_addr"]
self.i2c_device = smbus.SMBus(config["i2c_bus_num"])
def setup_sensor(self, config):
return True # nothing to do here
def get_value(self, config):
"""get the temperature, humidity or pressure value from the sensor"""
import bme680
data = bme680.BME680(self.i2c_addr, self.i2c_device)
        # Set up oversampling; these settings could potentially be exposed in the config file
data.set_humidity_oversample(bme680.OS_16X)
data.set_pressure_oversample(bme680.OS_16X)
data.set_temperature_oversample(bme680.OS_16X)
data.set_filter(bme680.FILTER_SIZE_3)
if config["type"] == "temperature":
return data.data.temperature
if config["type"] == "humidity":
return data.data.humidity
if config["type"] == "pressure":
return data.data.pressure
return None
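# Hypothetical usage sketch (illustrative only; requires real I2C hardware and
# the smbus/bme680 packages; 0x76 is just a commonly used example address):
if __name__ == "__main__":
    sensor = Sensor({"i2c_bus_num": 1, "chip_addr": 0x76})
    sensor.setup_sensor(None)
    print(sensor.get_value({"type": "temperature"}))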
| 29.811321
| 99
| 0.639873
|
db571d5baa95fd663764b4634ad3860fc2352143
| 1,741
|
py
|
Python
|
domain-parse-location/server.py
|
Logistic98/yoyo-algorithm
|
db4a866517ca77b0ce3b589a40c35d68a255e733
|
[
"Apache-2.0"
] | 2
|
2022-02-27T04:58:40.000Z
|
2022-02-27T04:58:45.000Z
|
domain-parse-location/server.py
|
Logistic98/yoyo-algorithm
|
db4a866517ca77b0ce3b589a40c35d68a255e733
|
[
"Apache-2.0"
] | null | null | null |
domain-parse-location/server.py
|
Logistic98/yoyo-algorithm
|
db4a866517ca77b0ce3b589a40c35d68a255e733
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import json
from flask import Flask, request, jsonify
from flask_cors import CORS
from code import ResponseCode, ResponseMessage
from log import logger
from utils import get_ip_by_domain, ip_get_location
# Create the service
app = Flask(__name__)
CORS(app, supports_credentials=True)
"""
# 获取域名或IP的地理位置信息
"""
@app.route(rule='/geoIp/getDomainOrIpLocation', methods=['POST'])
def getDomainOrIpLocation():
    # Read and parse the JSON request body
request_data = request.get_data(as_text=True)
request_body = json.loads(request_data)
    # Validate parameters
param = request_body.get("param")
if not param:
fail_response = dict(code=ResponseCode.RARAM_FAIL, msg=ResponseMessage.RARAM_FAIL, data=None)
logger.error(fail_response)
return jsonify(fail_response)
    # Resolve the domain to an IP (an IP input is left unchanged)
try:
ip = get_ip_by_domain(param)
except Exception as e:
logger.error(e)
fail_response = dict(code=ResponseCode.BUSINESS_FAIL, msg=ResponseMessage.BUSINESS_FAIL, data=None)
logger.error(fail_response)
return jsonify(fail_response)
    # Look up the geolocation by IP
try:
result = ip_get_location(ip)
except Exception as e:
logger.error(e)
fail_response = dict(code=ResponseCode.BUSINESS_FAIL, msg=ResponseMessage.BUSINESS_FAIL, data=None)
logger.error(fail_response)
return jsonify(fail_response)
    # Return the successful result
success_response = dict(code=ResponseCode.SUCCESS, msg=ResponseMessage.SUCCESS, data=result)
logger.info(success_response)
return jsonify(success_response)
if __name__ == '__main__':
    # Avoid garbled (non-ASCII) characters in JSON responses
app.config['JSON_AS_ASCII'] = False
    # Start the service on the specified host and port
app.run(host='0.0.0.0', port=5005, debug=False, threaded=True)
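# Example request (illustrative; the response shape depends on utils.ip_get_location):
#   curl -X POST http://localhost:5005/geoIp/getDomainOrIpLocation \
#        -H 'Content-Type: application/json' -d '{"param": "example.com"}'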
| 29.016667
| 107
| 0.71166
|
3d45ef93772ffee558bfff137d9710ab6427165a
| 1,133
|
py
|
Python
|
050_microsoft_easy_evaluateArithmeticTree.py
|
ericwangg/daily_coding_problem
|
ce401c512a49f9e2410b059e797b21056adab4ac
|
[
"MIT"
] | null | null | null |
050_microsoft_easy_evaluateArithmeticTree.py
|
ericwangg/daily_coding_problem
|
ce401c512a49f9e2410b059e797b21056adab4ac
|
[
"MIT"
] | null | null | null |
050_microsoft_easy_evaluateArithmeticTree.py
|
ericwangg/daily_coding_problem
|
ce401c512a49f9e2410b059e797b21056adab4ac
|
[
"MIT"
] | null | null | null |
# This problem was asked by Microsoft.
#
# Suppose an arithmetic expression is given as a binary tree. Each leaf is an integer and each internal node is one of '+', '−', '∗', or '/'.
#
# Given the root to such a tree, write a function to evaluate it.
#
# For example, given the following tree:
#
# *
# / \
# + +
# / \ / \
# 3 2 4 5
# You should return 45, as it is (3 + 2) * (4 + 5)
class Node:
def __init__(self, value):
self.left = None
self.value = value
self.right = None
# # '(3 + 2) * (4 + 5)'
# def build_arithmetic_tree(string):
def eval_arithmetic_tree(root):
if root.value.isnumeric():
return root.value # pass back up
else:
return eval("{} {} {}".format(eval_arithmetic_tree(root.left), root.value, eval_arithmetic_tree(root.right)))
if __name__ == '__main__':
# Creating the example tree.
root = Node('*')
root.left = Node('+')
root.left.left = Node('3')
root.left.right = Node('2')
root.right = Node('+')
root.right.left = Node('4')
root.right.right = Node('5')
print(eval_arithmetic_tree(root)) # and evaluate
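    # Additional illustrative check (not in the original file): '/' is handled
    # by Python's eval as true division, so integer leaves can yield a float.
    div_root = Node('/')
    div_root.left = Node('3')
    div_root.right = Node('2')
    print(eval_arithmetic_tree(div_root))  # 1.5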
| 27.634146
| 141
| 0.601942
|
35492e6a4712508a8e0b37b182cdd85bb543fb11
| 67,419
|
py
|
Python
|
Lib/test/support/__init__.py
|
Wind-River/cpython
|
9e82877bf15b360fe9ab74cf7e69210ec006a624
|
[
"0BSD"
] | 5
|
2019-04-28T05:24:54.000Z
|
2021-05-08T02:04:27.000Z
|
Lib/test/support/__init__.py
|
Wind-River/cpython
|
9e82877bf15b360fe9ab74cf7e69210ec006a624
|
[
"0BSD"
] | 11
|
2020-11-12T07:04:51.000Z
|
2022-01-01T14:00:52.000Z
|
Lib/test/support/__init__.py
|
Wind-River/cpython
|
9e82877bf15b360fe9ab74cf7e69210ec006a624
|
[
"0BSD"
] | 1
|
2020-10-30T14:25:00.000Z
|
2020-10-30T14:25:00.000Z
|
"""Supporting definitions for the Python regression tests."""
if __name__ != 'test.support':
raise ImportError('support must be imported from the test package')
import contextlib
import functools
import os
import re
import stat
import sys
import sysconfig
import time
import types
import unittest
from .testresult import get_test_runner
try:
from _testcapi import unicode_legacy_string
except ImportError:
unicode_legacy_string = None
__all__ = [
# globals
"PIPE_MAX_SIZE", "verbose", "max_memuse", "use_resources", "failfast",
# exceptions
"Error", "TestFailed", "TestDidNotRun", "ResourceDenied",
# io
"record_original_stdout", "get_original_stdout", "captured_stdout",
"captured_stdin", "captured_stderr",
# unittest
"is_resource_enabled", "requires", "requires_freebsd_version",
"requires_linux_version", "requires_mac_ver",
"check_syntax_error",
"BasicTestRunner", "run_unittest", "run_doctest",
"requires_gzip", "requires_bz2", "requires_lzma",
"bigmemtest", "bigaddrspacetest", "cpython_only", "get_attribute",
"requires_IEEE_754", "requires_zlib",
"anticipate_failure", "load_package_tests", "detect_api_mismatch",
"check__all__", "skip_if_buggy_ucrt_strfptime",
"skip_if_restricted_mkfifo",
# sys
"is_jython", "is_android", "check_impl_detail", "unix_shell",
"setswitchinterval", "is_vxworks",
# network
"open_urlresource",
# processes
"reap_children",
# miscellaneous
"run_with_locale", "swap_item", "findfile",
"swap_attr", "Matcher", "set_memlimit", "SuppressCrashReport", "sortdict",
"run_with_tz", "PGO", "missing_compiler_executable",
"ALWAYS_EQ", "NEVER_EQ", "LARGEST", "SMALLEST",
"LOOPBACK_TIMEOUT", "INTERNET_TIMEOUT", "SHORT_TIMEOUT", "LONG_TIMEOUT",
]
# Timeout in seconds for tests using a network server listening on the network
# local loopback interface like 127.0.0.1.
#
# The timeout is long enough to prevent test failure: it takes into account
# that the client and the server can run in different threads or even different
# processes.
#
# The timeout should be long enough for connect(), recv() and send() methods
# of socket.socket.
LOOPBACK_TIMEOUT = 5.0
if sys.platform == 'win32' and ' 32 bit (ARM)' in sys.version:
# bpo-37553: test_socket.SendfileUsingSendTest is taking longer than 2
# seconds on Windows ARM32 buildbot
LOOPBACK_TIMEOUT = 10
elif sys.platform == 'vxworks':
LOOPBACK_TIMEOUT = 10
# Timeout in seconds for network requests going to the Internet. The timeout is
# short enough to prevent a test to wait for too long if the Internet request
# is blocked for whatever reason.
#
# Usually, a timeout using INTERNET_TIMEOUT should not mark a test as failed,
# but skip the test instead: see transient_internet().
INTERNET_TIMEOUT = 60.0
# Timeout in seconds to mark a test as failed if the test takes "too long".
#
# The timeout value depends on the regrtest --timeout command line option.
#
# If a test using SHORT_TIMEOUT starts to fail randomly on slow buildbots, use
# LONG_TIMEOUT instead.
SHORT_TIMEOUT = 30.0
# Timeout in seconds to detect when a test hangs.
#
# It is long enough to reduce the risk of test failure on the slowest Python
# buildbots. It should not be used to mark a test as failed if the test takes
# "too long". The timeout value depends on the regrtest --timeout command line
# option.
LONG_TIMEOUT = 5 * 60.0
class Error(Exception):
"""Base class for regression test exceptions."""
class TestFailed(Error):
"""Test failed."""
class TestDidNotRun(Error):
"""Test did not run any subtests."""
class ResourceDenied(unittest.SkipTest):
"""Test skipped because it requested a disallowed resource.
This is raised when a test calls requires() for a resource that
    has not been enabled. It is used to distinguish between expected
and unexpected skips.
"""
def anticipate_failure(condition):
"""Decorator to mark a test that is known to be broken in some cases
Any use of this decorator should have a comment identifying the
associated tracker issue.
"""
if condition:
return unittest.expectedFailure
return lambda f: f
def load_package_tests(pkg_dir, loader, standard_tests, pattern):
"""Generic load_tests implementation for simple test packages.
Most packages can implement load_tests using this function as follows:
def load_tests(*args):
return load_package_tests(os.path.dirname(__file__), *args)
"""
if pattern is None:
pattern = "test*"
top_dir = os.path.dirname( # Lib
os.path.dirname( # test
os.path.dirname(__file__))) # support
package_tests = loader.discover(start_dir=pkg_dir,
top_level_dir=top_dir,
pattern=pattern)
standard_tests.addTests(package_tests)
return standard_tests
def get_attribute(obj, name):
"""Get an attribute, raising SkipTest if AttributeError is raised."""
try:
attribute = getattr(obj, name)
except AttributeError:
raise unittest.SkipTest("object %r has no attribute %r" % (obj, name))
else:
return attribute
verbose = 1 # Flag set to 0 by regrtest.py
use_resources = None # Flag set to [] by regrtest.py
max_memuse = 0 # Disable bigmem tests (they will still be run with
# small sizes, to make sure they work.)
real_max_memuse = 0
junit_xml_list = None # list of testsuite XML elements
failfast = False
# _original_stdout is meant to hold stdout at the time regrtest began.
# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
# The point is to have some flavor of stdout the user can actually see.
_original_stdout = None
def record_original_stdout(stdout):
global _original_stdout
_original_stdout = stdout
def get_original_stdout():
return _original_stdout or sys.stdout
def _force_run(path, func, *args):
try:
return func(*args)
except OSError as err:
if verbose >= 2:
print('%s: %s' % (err.__class__.__name__, err))
print('re-run %s%r' % (func.__name__, args))
os.chmod(path, stat.S_IRWXU)
return func(*args)
# Check whether a gui is actually available
def _is_gui_available():
if hasattr(_is_gui_available, 'result'):
return _is_gui_available.result
import platform
reason = None
if sys.platform.startswith('win') and platform.win32_is_iot():
reason = "gui is not available on Windows IoT Core"
elif sys.platform.startswith('win'):
# if Python is running as a service (such as the buildbot service),
# gui interaction may be disallowed
import ctypes
import ctypes.wintypes
UOI_FLAGS = 1
WSF_VISIBLE = 0x0001
class USEROBJECTFLAGS(ctypes.Structure):
_fields_ = [("fInherit", ctypes.wintypes.BOOL),
("fReserved", ctypes.wintypes.BOOL),
("dwFlags", ctypes.wintypes.DWORD)]
dll = ctypes.windll.user32
h = dll.GetProcessWindowStation()
if not h:
raise ctypes.WinError()
uof = USEROBJECTFLAGS()
needed = ctypes.wintypes.DWORD()
res = dll.GetUserObjectInformationW(h,
UOI_FLAGS,
ctypes.byref(uof),
ctypes.sizeof(uof),
ctypes.byref(needed))
if not res:
raise ctypes.WinError()
if not bool(uof.dwFlags & WSF_VISIBLE):
reason = "gui not available (WSF_VISIBLE flag not set)"
elif sys.platform == 'darwin':
# The Aqua Tk implementations on OS X can abort the process if
# being called in an environment where a window server connection
# cannot be made, for instance when invoked by a buildbot or ssh
# process not running under the same user id as the current console
# user. To avoid that, raise an exception if the window manager
# connection is not available.
from ctypes import cdll, c_int, pointer, Structure
from ctypes.util import find_library
app_services = cdll.LoadLibrary(find_library("ApplicationServices"))
if app_services.CGMainDisplayID() == 0:
reason = "gui tests cannot run without OS X window manager"
else:
class ProcessSerialNumber(Structure):
_fields_ = [("highLongOfPSN", c_int),
("lowLongOfPSN", c_int)]
psn = ProcessSerialNumber()
psn_p = pointer(psn)
if ( (app_services.GetCurrentProcess(psn_p) < 0) or
(app_services.SetFrontProcess(psn_p) < 0) ):
reason = "cannot run without OS X gui process"
# check on every platform whether tkinter can actually do anything
if not reason:
try:
from tkinter import Tk
root = Tk()
root.withdraw()
root.update()
root.destroy()
except Exception as e:
err_string = str(e)
if len(err_string) > 50:
err_string = err_string[:50] + ' [...]'
reason = 'Tk unavailable due to {}: {}'.format(type(e).__name__,
err_string)
_is_gui_available.reason = reason
_is_gui_available.result = not reason
return _is_gui_available.result
def is_resource_enabled(resource):
"""Test whether a resource is enabled.
Known resources are set by regrtest.py. If not running under regrtest.py,
all resources are assumed enabled unless use_resources has been set.
"""
return use_resources is None or resource in use_resources
def requires(resource, msg=None):
"""Raise ResourceDenied if the specified resource is not available."""
if not is_resource_enabled(resource):
if msg is None:
msg = "Use of the %r resource not enabled" % resource
raise ResourceDenied(msg)
if resource == 'gui' and not _is_gui_available():
raise ResourceDenied(_is_gui_available.reason)
def _requires_unix_version(sysname, min_version):
"""Decorator raising SkipTest if the OS is `sysname` and the version is less
than `min_version`.
For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest if
the FreeBSD version is less than 7.2.
"""
import platform
min_version_txt = '.'.join(map(str, min_version))
version_txt = platform.release().split('-', 1)[0]
if platform.system() == sysname:
try:
version = tuple(map(int, version_txt.split('.')))
except ValueError:
skip = False
else:
skip = version < min_version
else:
skip = False
return unittest.skipIf(
skip,
f"{sysname} version {min_version_txt} or higher required, not "
f"{version_txt}"
)
def requires_freebsd_version(*min_version):
"""Decorator raising SkipTest if the OS is FreeBSD and the FreeBSD version is
less than `min_version`.
For example, @requires_freebsd_version(7, 2) raises SkipTest if the FreeBSD
version is less than 7.2.
"""
return _requires_unix_version('FreeBSD', min_version)
def requires_linux_version(*min_version):
"""Decorator raising SkipTest if the OS is Linux and the Linux version is
less than `min_version`.
For example, @requires_linux_version(2, 6, 32) raises SkipTest if the Linux
version is less than 2.6.32.
"""
return _requires_unix_version('Linux', min_version)
def requires_mac_ver(*min_version):
"""Decorator raising SkipTest if the OS is Mac OS X and the OS X
version if less than min_version.
For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version
is lesser than 10.5.
"""
def decorator(func):
@functools.wraps(func)
def wrapper(*args, **kw):
if sys.platform == 'darwin':
import platform
version_txt = platform.mac_ver()[0]
try:
version = tuple(map(int, version_txt.split('.')))
except ValueError:
pass
else:
if version < min_version:
min_version_txt = '.'.join(map(str, min_version))
raise unittest.SkipTest(
"Mac OS X %s or higher required, not %s"
% (min_version_txt, version_txt))
return func(*args, **kw)
wrapper.min_version = min_version
return wrapper
return decorator
def system_must_validate_cert(f):
"""Skip the test on TLS certificate validation failures."""
@functools.wraps(f)
def dec(*args, **kwargs):
try:
f(*args, **kwargs)
except OSError as e:
if "CERTIFICATE_VERIFY_FAILED" in str(e):
raise unittest.SkipTest("system does not contain "
"necessary certificates")
raise
return dec
# A constant likely larger than the underlying OS pipe buffer size, to
# make writes blocking.
# Windows limit seems to be around 512 B, and many Unix kernels have a
# 64 KiB pipe buffer size or 16 * PAGE_SIZE: take a few megs to be sure.
# (see issue #17835 for a discussion of this number).
PIPE_MAX_SIZE = 4 * 1024 * 1024 + 1
# A constant likely larger than the underlying OS socket buffer size, to make
# writes blocking.
# The socket buffer sizes can usually be tuned system-wide (e.g. through sysctl
# on Linux), or on a per-socket basis (SO_SNDBUF/SO_RCVBUF). See issue #18643
# for a discussion of this number.
SOCK_MAX_SIZE = 16 * 1024 * 1024 + 1
# decorator for skipping tests on non-IEEE 754 platforms
requires_IEEE_754 = unittest.skipUnless(
float.__getformat__("double").startswith("IEEE"),
"test requires IEEE 754 doubles")
def requires_zlib(reason='requires zlib'):
try:
import zlib
except ImportError:
zlib = None
return unittest.skipUnless(zlib, reason)
def requires_gzip(reason='requires gzip'):
try:
import gzip
except ImportError:
gzip = None
return unittest.skipUnless(gzip, reason)
def requires_bz2(reason='requires bz2'):
try:
import bz2
except ImportError:
bz2 = None
return unittest.skipUnless(bz2, reason)
def requires_lzma(reason='requires lzma'):
try:
import lzma
except ImportError:
lzma = None
return unittest.skipUnless(lzma, reason)
requires_legacy_unicode_capi = unittest.skipUnless(unicode_legacy_string,
'requires legacy Unicode C API')
is_jython = sys.platform.startswith('java')
is_android = hasattr(sys, 'getandroidapilevel')
is_vxworks = (sys.platform == "vxworks")
if sys.platform != 'win32':
unix_shell = '/system/bin/sh' if is_android else '/bin/sh'
else:
unix_shell = None
# Define the URL of a dedicated HTTP server for the network tests.
# The URL must use clear-text HTTP: no redirection to encrypted HTTPS.
TEST_HTTP_URL = "http://www.pythontest.net"
# Set by libregrtest/main.py so we can skip tests that are not
# useful for PGO
PGO = False
# Set by libregrtest/main.py if we are running the extended (time consuming)
# PGO task. If this is True, PGO is also True.
PGO_EXTENDED = False
# TEST_HOME_DIR refers to the top level directory of the "test" package
# that contains Python's regression test suite
TEST_SUPPORT_DIR = os.path.dirname(os.path.abspath(__file__))
TEST_HOME_DIR = os.path.dirname(TEST_SUPPORT_DIR)
# TEST_DATA_DIR is used as a target download location for remote resources
TEST_DATA_DIR = os.path.join(TEST_HOME_DIR, "data")
def findfile(filename, subdir=None):
"""Try to find a file on sys.path or in the test directory. If it is not
found the argument passed to the function is returned (this does not
necessarily signal failure; could still be the legitimate path).
Setting *subdir* indicates a relative path to use to find the file
rather than looking directly in the path directories.
"""
if os.path.isabs(filename):
return filename
if subdir is not None:
filename = os.path.join(subdir, filename)
path = [TEST_HOME_DIR] + sys.path
for dn in path:
fn = os.path.join(dn, filename)
if os.path.exists(fn): return fn
return filename
def sortdict(dict):
"Like repr(dict), but in sorted order."
items = sorted(dict.items())
reprpairs = ["%r: %r" % pair for pair in items]
withcommas = ", ".join(reprpairs)
return "{%s}" % withcommas
def check_syntax_error(testcase, statement, errtext='', *, lineno=None, offset=None):
with testcase.assertRaisesRegex(SyntaxError, errtext) as cm:
compile(statement, '<test string>', 'exec')
err = cm.exception
testcase.assertIsNotNone(err.lineno)
if lineno is not None:
testcase.assertEqual(err.lineno, lineno)
testcase.assertIsNotNone(err.offset)
if offset is not None:
testcase.assertEqual(err.offset, offset)
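# Illustrative usage inside a TestCase (hedged, not in the original module):
#     check_syntax_error(self, "x = 1 +", lineno=1)
# asserts that compiling the statement raises SyntaxError at that position.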
def open_urlresource(url, *args, **kw):
import urllib.request, urllib.parse
from .os_helper import unlink
try:
import gzip
except ImportError:
gzip = None
check = kw.pop('check', None)
filename = urllib.parse.urlparse(url)[2].split('/')[-1] # '/': it's URL!
fn = os.path.join(TEST_DATA_DIR, filename)
def check_valid_file(fn):
f = open(fn, *args, **kw)
if check is None:
return f
elif check(f):
f.seek(0)
return f
f.close()
if os.path.exists(fn):
f = check_valid_file(fn)
if f is not None:
return f
unlink(fn)
# Verify the requirement before downloading the file
requires('urlfetch')
if verbose:
print('\tfetching %s ...' % url, file=get_original_stdout())
opener = urllib.request.build_opener()
if gzip:
opener.addheaders.append(('Accept-Encoding', 'gzip'))
f = opener.open(url, timeout=INTERNET_TIMEOUT)
if gzip and f.headers.get('Content-Encoding') == 'gzip':
f = gzip.GzipFile(fileobj=f)
try:
with open(fn, "wb") as out:
s = f.read()
while s:
out.write(s)
s = f.read()
finally:
f.close()
f = check_valid_file(fn)
if f is not None:
return f
raise TestFailed('invalid resource %r' % fn)
@contextlib.contextmanager
def captured_output(stream_name):
"""Return a context manager used by captured_stdout/stdin/stderr
that temporarily replaces the sys stream *stream_name* with a StringIO."""
import io
orig_stdout = getattr(sys, stream_name)
setattr(sys, stream_name, io.StringIO())
try:
yield getattr(sys, stream_name)
finally:
setattr(sys, stream_name, orig_stdout)
def captured_stdout():
"""Capture the output of sys.stdout:
with captured_stdout() as stdout:
print("hello")
self.assertEqual(stdout.getvalue(), "hello\\n")
"""
return captured_output("stdout")
def captured_stderr():
"""Capture the output of sys.stderr:
with captured_stderr() as stderr:
print("hello", file=sys.stderr)
self.assertEqual(stderr.getvalue(), "hello\\n")
"""
return captured_output("stderr")
def captured_stdin():
"""Capture the input to sys.stdin:
with captured_stdin() as stdin:
stdin.write('hello\\n')
stdin.seek(0)
# call test code that consumes from sys.stdin
captured = input()
self.assertEqual(captured, "hello")
"""
return captured_output("stdin")
def gc_collect():
"""Force as many objects as possible to be collected.
In non-CPython implementations of Python, this is needed because timely
deallocation is not guaranteed by the garbage collector. (Even in CPython
this can be the case in case of reference cycles.) This means that __del__
methods may be called later than expected and weakrefs may remain alive for
longer than expected. This function tries its best to force all garbage
objects to disappear.
"""
import gc
gc.collect()
if is_jython:
time.sleep(0.1)
gc.collect()
gc.collect()
@contextlib.contextmanager
def disable_gc():
import gc
have_gc = gc.isenabled()
gc.disable()
try:
yield
finally:
if have_gc:
gc.enable()
def python_is_optimized():
"""Find if Python was built with optimizations."""
cflags = sysconfig.get_config_var('PY_CFLAGS') or ''
final_opt = ""
for opt in cflags.split():
if opt.startswith('-O'):
final_opt = opt
return final_opt not in ('', '-O0', '-Og')
_header = 'nP'
_align = '0n'
if hasattr(sys, "getobjects"):
_header = '2P' + _header
_align = '0P'
_vheader = _header + 'n'
def calcobjsize(fmt):
import struct
return struct.calcsize(_header + fmt + _align)
def calcvobjsize(fmt):
import struct
return struct.calcsize(_vheader + fmt + _align)
_TPFLAGS_HAVE_GC = 1<<14
_TPFLAGS_HEAPTYPE = 1<<9
def check_sizeof(test, o, size):
import _testinternalcapi
result = sys.getsizeof(o)
# add GC header size
if ((type(o) == type) and (o.__flags__ & _TPFLAGS_HEAPTYPE) or\
((type(o) != type) and (type(o).__flags__ & _TPFLAGS_HAVE_GC))):
size += _testinternalcapi.SIZEOF_PYGC_HEAD
msg = 'wrong size for %s: got %d, expected %d' \
% (type(o), result, size)
test.assertEqual(result, size, msg)
#=======================================================================
# Decorator for running a function in a different locale, correctly resetting
# it afterwards.
def run_with_locale(catstr, *locales):
def decorator(func):
def inner(*args, **kwds):
try:
import locale
category = getattr(locale, catstr)
orig_locale = locale.setlocale(category)
except AttributeError:
# if the test author gives us an invalid category string
raise
except:
# cannot retrieve original locale, so do nothing
locale = orig_locale = None
else:
for loc in locales:
try:
locale.setlocale(category, loc)
break
except:
pass
# now run the function, resetting the locale on exceptions
try:
return func(*args, **kwds)
finally:
if locale and orig_locale:
locale.setlocale(category, orig_locale)
inner.__name__ = func.__name__
inner.__doc__ = func.__doc__
return inner
return decorator
#=======================================================================
# Decorator for running a function in a specific timezone, correctly
# resetting it afterwards.
def run_with_tz(tz):
def decorator(func):
def inner(*args, **kwds):
try:
tzset = time.tzset
except AttributeError:
raise unittest.SkipTest("tzset required")
if 'TZ' in os.environ:
orig_tz = os.environ['TZ']
else:
orig_tz = None
os.environ['TZ'] = tz
tzset()
# now run the function, resetting the tz on exceptions
try:
return func(*args, **kwds)
finally:
if orig_tz is None:
del os.environ['TZ']
else:
os.environ['TZ'] = orig_tz
time.tzset()
inner.__name__ = func.__name__
inner.__doc__ = func.__doc__
return inner
return decorator
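# Illustrative usage (hedged, not in the original module):
#     @run_with_tz('EST+05EDT,M3.2.0,M11.1.0')
#     def test_something(self):
#         ...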
#=======================================================================
# Big-memory-test support. Separate from 'resources' because memory use
# should be configurable.
# Some handy shorthands. Note that these are used for byte-limits as well
# as size-limits, in the various bigmem tests
_1M = 1024*1024
_1G = 1024 * _1M
_2G = 2 * _1G
_4G = 4 * _1G
MAX_Py_ssize_t = sys.maxsize
def set_memlimit(limit):
global max_memuse
global real_max_memuse
sizes = {
'k': 1024,
'm': _1M,
'g': _1G,
't': 1024*_1G,
}
m = re.match(r'(\d+(\.\d+)?) (K|M|G|T)b?$', limit,
re.IGNORECASE | re.VERBOSE)
if m is None:
raise ValueError('Invalid memory limit %r' % (limit,))
memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()])
real_max_memuse = memlimit
if memlimit > MAX_Py_ssize_t:
memlimit = MAX_Py_ssize_t
if memlimit < _2G - 1:
raise ValueError('Memory limit %r too low to be useful' % (limit,))
max_memuse = memlimit
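# Illustrative call (hedged; executing it at import time would mutate the
# globals above):
#     set_memlimit('2.5G')   # e.g. as passed via regrtest's -M option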
class _MemoryWatchdog:
"""An object which periodically watches the process' memory consumption
and prints it out.
"""
def __init__(self):
self.procfile = '/proc/{pid}/statm'.format(pid=os.getpid())
self.started = False
def start(self):
import warnings
try:
f = open(self.procfile, 'r')
except OSError as e:
warnings.warn('/proc not available for stats: {}'.format(e),
RuntimeWarning)
sys.stderr.flush()
return
import subprocess
with f:
watchdog_script = findfile("memory_watchdog.py")
self.mem_watchdog = subprocess.Popen([sys.executable, watchdog_script],
stdin=f,
stderr=subprocess.DEVNULL)
self.started = True
def stop(self):
if self.started:
self.mem_watchdog.terminate()
self.mem_watchdog.wait()
def bigmemtest(size, memuse, dry_run=True):
"""Decorator for bigmem tests.
'size' is a requested size for the test (in arbitrary, test-interpreted
units.) 'memuse' is the number of bytes per unit for the test, or a good
estimate of it. For example, a test that needs two byte buffers, of 4 GiB
each, could be decorated with @bigmemtest(size=_4G, memuse=2).
The 'size' argument is normally passed to the decorated test method as an
extra argument. If 'dry_run' is true, the value passed to the test method
may be less than the requested value. If 'dry_run' is false, it means the
test doesn't support dummy runs when -M is not specified.
"""
def decorator(f):
def wrapper(self):
size = wrapper.size
memuse = wrapper.memuse
if not real_max_memuse:
maxsize = 5147
else:
maxsize = size
if ((real_max_memuse or not dry_run)
and real_max_memuse < maxsize * memuse):
raise unittest.SkipTest(
"not enough memory: %.1fG minimum needed"
% (size * memuse / (1024 ** 3)))
if real_max_memuse and verbose:
print()
print(" ... expected peak memory use: {peak:.1f}G"
.format(peak=size * memuse / (1024 ** 3)))
watchdog = _MemoryWatchdog()
watchdog.start()
else:
watchdog = None
try:
return f(self, maxsize)
finally:
if watchdog:
watchdog.stop()
wrapper.size = size
wrapper.memuse = memuse
return wrapper
return decorator
def bigaddrspacetest(f):
"""Decorator for tests that fill the address space."""
def wrapper(self):
if max_memuse < MAX_Py_ssize_t:
if MAX_Py_ssize_t >= 2**63 - 1 and max_memuse >= 2**31:
raise unittest.SkipTest(
"not enough memory: try a 32-bit build instead")
else:
raise unittest.SkipTest(
"not enough memory: %.1fG minimum needed"
% (MAX_Py_ssize_t / (1024 ** 3)))
else:
return f(self)
return wrapper
#=======================================================================
# unittest integration.
class BasicTestRunner:
def run(self, test):
result = unittest.TestResult()
test(result)
return result
def _id(obj):
return obj
def requires_resource(resource):
if resource == 'gui' and not _is_gui_available():
return unittest.skip(_is_gui_available.reason)
if is_resource_enabled(resource):
return _id
else:
return unittest.skip("resource {0!r} is not enabled".format(resource))
def cpython_only(test):
"""
Decorator for tests only applicable on CPython.
"""
return impl_detail(cpython=True)(test)
def impl_detail(msg=None, **guards):
if check_impl_detail(**guards):
return _id
if msg is None:
guardnames, default = _parse_guards(guards)
if default:
msg = "implementation detail not available on {0}"
else:
msg = "implementation detail specific to {0}"
guardnames = sorted(guardnames.keys())
msg = msg.format(' or '.join(guardnames))
return unittest.skip(msg)
def _parse_guards(guards):
# Returns a tuple ({platform_name: run_me}, default_value)
if not guards:
return ({'cpython': True}, False)
is_true = list(guards.values())[0]
assert list(guards.values()) == [is_true] * len(guards) # all True or all False
return (guards, not is_true)
# Use the following check to guard CPython's implementation-specific tests --
# or to run them only on the implementation(s) guarded by the arguments.
def check_impl_detail(**guards):
"""This function returns True or False depending on the host platform.
Examples:
if check_impl_detail(): # only on CPython (default)
if check_impl_detail(jython=True): # only on Jython
if check_impl_detail(cpython=False): # everywhere except on CPython
"""
guards, default = _parse_guards(guards)
return guards.get(sys.implementation.name, default)
def no_tracing(func):
"""Decorator to temporarily turn off tracing for the duration of a test."""
if not hasattr(sys, 'gettrace'):
return func
else:
@functools.wraps(func)
def wrapper(*args, **kwargs):
original_trace = sys.gettrace()
try:
sys.settrace(None)
return func(*args, **kwargs)
finally:
sys.settrace(original_trace)
return wrapper
def refcount_test(test):
"""Decorator for tests which involve reference counting.
    To start, the decorator does not run the test if it is not run by CPython.
After that, any trace function is unset during the test to prevent
unexpected refcounts caused by the trace function.
"""
return no_tracing(cpython_only(test))
def _filter_suite(suite, pred):
"""Recursively filter test cases in a suite based on a predicate."""
newtests = []
for test in suite._tests:
if isinstance(test, unittest.TestSuite):
_filter_suite(test, pred)
newtests.append(test)
else:
if pred(test):
newtests.append(test)
suite._tests = newtests
def _run_suite(suite):
"""Run tests from a unittest.TestSuite-derived class."""
runner = get_test_runner(sys.stdout,
verbosity=verbose,
capture_output=(junit_xml_list is not None))
result = runner.run(suite)
if junit_xml_list is not None:
junit_xml_list.append(result.get_xml_element())
if not result.testsRun and not result.skipped:
raise TestDidNotRun
if not result.wasSuccessful():
if len(result.errors) == 1 and not result.failures:
err = result.errors[0][1]
elif len(result.failures) == 1 and not result.errors:
err = result.failures[0][1]
else:
err = "multiple errors occurred"
if not verbose: err += "; run in verbose mode for details"
raise TestFailed(err)
# By default, don't filter tests
_match_test_func = None
_accept_test_patterns = None
_ignore_test_patterns = None
def match_test(test):
# Function used by support.run_unittest() and regrtest --list-cases
if _match_test_func is None:
return True
else:
return _match_test_func(test.id())
def _is_full_match_test(pattern):
# If a pattern contains at least one dot, it's considered
# as a full test identifier.
# Example: 'test.test_os.FileTests.test_access'.
#
# ignore patterns which contain fnmatch patterns: '*', '?', '[...]'
# or '[!...]'. For example, ignore 'test_access*'.
return ('.' in pattern) and (not re.search(r'[?*\[\]]', pattern))
def set_match_tests(accept_patterns=None, ignore_patterns=None):
global _match_test_func, _accept_test_patterns, _ignore_test_patterns
if accept_patterns is None:
accept_patterns = ()
if ignore_patterns is None:
ignore_patterns = ()
accept_func = ignore_func = None
if accept_patterns != _accept_test_patterns:
accept_patterns, accept_func = _compile_match_function(accept_patterns)
if ignore_patterns != _ignore_test_patterns:
ignore_patterns, ignore_func = _compile_match_function(ignore_patterns)
# Create a copy since patterns can be mutable and so modified later
_accept_test_patterns = tuple(accept_patterns)
_ignore_test_patterns = tuple(ignore_patterns)
if accept_func is not None or ignore_func is not None:
def match_function(test_id):
accept = True
ignore = False
if accept_func:
accept = accept_func(test_id)
if ignore_func:
ignore = ignore_func(test_id)
return accept and not ignore
_match_test_func = match_function
def _compile_match_function(patterns):
if not patterns:
func = None
# set_match_tests(None) behaves as set_match_tests(())
patterns = ()
elif all(map(_is_full_match_test, patterns)):
# Simple case: all patterns are full test identifier.
# The test.bisect_cmd utility only uses such full test identifiers.
func = set(patterns).__contains__
else:
import fnmatch
regex = '|'.join(map(fnmatch.translate, patterns))
# The search *is* case sensitive on purpose:
# don't use flags=re.IGNORECASE
regex_match = re.compile(regex).match
def match_test_regex(test_id):
if regex_match(test_id):
# The regex matches the whole identifier, for example
# 'test.test_os.FileTests.test_access'.
return True
else:
# Try to match parts of the test identifier.
# For example, split 'test.test_os.FileTests.test_access'
# into: 'test', 'test_os', 'FileTests' and 'test_access'.
return any(map(regex_match, test_id.split(".")))
func = match_test_regex
return patterns, func
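# Illustrative (hedged, not in the original module):
#     set_match_tests(accept_patterns=['test.test_os.*'])
# makes match_test(test) return True only for tests whose dotted id
# matches that fnmatch pattern.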
def run_unittest(*classes):
"""Run tests from unittest.TestCase-derived classes."""
valid_types = (unittest.TestSuite, unittest.TestCase)
suite = unittest.TestSuite()
for cls in classes:
if isinstance(cls, str):
if cls in sys.modules:
suite.addTest(unittest.findTestCases(sys.modules[cls]))
else:
raise ValueError("str arguments must be keys in sys.modules")
elif isinstance(cls, valid_types):
suite.addTest(cls)
else:
suite.addTest(unittest.makeSuite(cls))
_filter_suite(suite, match_test)
_run_suite(suite)
#=======================================================================
# Check for the presence of docstrings.
# Rather than trying to enumerate all the cases where docstrings may be
# disabled, we just check for that directly
def _check_docstrings():
"""Just used to check if docstrings are enabled"""
MISSING_C_DOCSTRINGS = (check_impl_detail() and
sys.platform != 'win32' and
not sysconfig.get_config_var('WITH_DOC_STRINGS'))
HAVE_DOCSTRINGS = (_check_docstrings.__doc__ is not None and
not MISSING_C_DOCSTRINGS)
requires_docstrings = unittest.skipUnless(HAVE_DOCSTRINGS,
"test requires docstrings")
#=======================================================================
# doctest driver.
def run_doctest(module, verbosity=None, optionflags=0):
"""Run doctest on the given module. Return (#failures, #tests).
If optional argument verbosity is not specified (or is None), pass
support's belief about verbosity on to doctest. Else doctest's
usual behavior is used (it searches sys.argv for -v).
"""
import doctest
if verbosity is None:
verbosity = verbose
else:
verbosity = None
f, t = doctest.testmod(module, verbose=verbosity, optionflags=optionflags)
if f:
raise TestFailed("%d of %d doctests failed" % (f, t))
if verbose:
print('doctest (%s) ... %d tests with zero failures' %
(module.__name__, t))
return f, t
#=======================================================================
# Support for saving and restoring the imported modules.
def print_warning(msg):
# bpo-39983: Print into sys.__stderr__ to display the warning even
# when sys.stderr is captured temporarily by a test
for line in msg.splitlines():
print(f"Warning -- {line}", file=sys.__stderr__, flush=True)
# Flag used by saved_test_environment of test.libregrtest.save_env,
# to check if a test modified the environment. The flag should be set to False
# before running a new test.
#
# For example, threading_helper.threading_cleanup() sets the flag if the function fails
# to cleanup threads.
environment_altered = False
def reap_children():
"""Use this function at the end of test_main() whenever sub-processes
are started. This will help ensure that no extra children (zombies)
stick around to hog resources and create problems when looking
for refleaks.
"""
global environment_altered
# Need os.waitpid(-1, os.WNOHANG): Windows is not supported
if not (hasattr(os, 'waitpid') and hasattr(os, 'WNOHANG')):
return
# Reap all our dead child processes so we don't leave zombies around.
# These hog resources and might be causing some of the buildbots to die.
while True:
try:
# Read the exit status of any child process which already completed
pid, status = os.waitpid(-1, os.WNOHANG)
except OSError:
break
if pid == 0:
break
print_warning(f"reap_children() reaped child process {pid}")
environment_altered = True
@contextlib.contextmanager
def swap_attr(obj, attr, new_val):
"""Temporary swap out an attribute with a new object.
Usage:
with swap_attr(obj, "attr", 5):
...
This will set obj.attr to 5 for the duration of the with: block,
restoring the old value at the end of the block. If `attr` doesn't
exist on `obj`, it will be created and then deleted at the end of the
block.
The old value (or None if it doesn't exist) will be assigned to the
target of the "as" clause, if there is one.
"""
if hasattr(obj, attr):
real_val = getattr(obj, attr)
setattr(obj, attr, new_val)
try:
yield real_val
finally:
setattr(obj, attr, real_val)
else:
setattr(obj, attr, new_val)
try:
yield
finally:
if hasattr(obj, attr):
delattr(obj, attr)
@contextlib.contextmanager
def swap_item(obj, item, new_val):
"""Temporary swap out an item with a new object.
Usage:
with swap_item(obj, "item", 5):
...
This will set obj["item"] to 5 for the duration of the with: block,
restoring the old value at the end of the block. If `item` doesn't
exist on `obj`, it will be created and then deleted at the end of the
block.
The old value (or None if it doesn't exist) will be assigned to the
target of the "as" clause, if there is one.
"""
if item in obj:
real_val = obj[item]
obj[item] = new_val
try:
yield real_val
finally:
obj[item] = real_val
else:
obj[item] = new_val
try:
yield
finally:
if item in obj:
del obj[item]
def args_from_interpreter_flags():
"""Return a list of command-line arguments reproducing the current
settings in sys.flags and sys.warnoptions."""
import subprocess
return subprocess._args_from_interpreter_flags()
def optim_args_from_interpreter_flags():
"""Return a list of command-line arguments reproducing the current
optimization settings in sys.flags."""
import subprocess
return subprocess._optim_args_from_interpreter_flags()
class Matcher(object):
_partial_matches = ('msg', 'message')
def matches(self, d, **kwargs):
"""
Try to match a single dict with the supplied arguments.
Keys whose values are strings and which are in self._partial_matches
will be checked for partial (i.e. substring) matches. You can extend
this scheme to (for example) do regular expression matching, etc.
"""
result = True
for k in kwargs:
v = kwargs[k]
dv = d.get(k)
if not self.match_value(k, dv, v):
result = False
break
return result
def match_value(self, k, dv, v):
"""
Try to match a single stored value (dv) with a supplied value (v).
"""
if type(v) != type(dv):
result = False
elif type(dv) is not str or k not in self._partial_matches:
result = (v == dv)
else:
result = dv.find(v) >= 0
return result
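# Minimal usage sketch (illustrative only, not part of the module's API):
# Matcher does exact matches on most keys but substring matches for the
# keys listed in _partial_matches. The dict below is a hypothetical record.
def _matcher_usage_example():
    m = Matcher()
    rec = {'msg': 'connection refused', 'levelname': 'ERROR'}
    assert m.matches(rec, msg='refused', levelname='ERROR')  # substring on 'msg'
    assert not m.matches(rec, msg='timeout')                 # substring missing
    assert not m.matches(rec, levelname='ERR')               # exact key mismatch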
_buggy_ucrt = None
def skip_if_buggy_ucrt_strfptime(test):
"""
Skip decorator for tests that use buggy strptime/strftime
If the UCRT bugs are present time.localtime().tm_zone will be
an empty string, otherwise we assume the UCRT bugs are fixed
See bpo-37552 [Windows] strptime/strftime return invalid
results with UCRT version 17763.615
"""
import locale
global _buggy_ucrt
if _buggy_ucrt is None:
        if (sys.platform == 'win32' and
locale.getdefaultlocale()[1] == 'cp65001' and
time.localtime().tm_zone == ''):
_buggy_ucrt = True
else:
_buggy_ucrt = False
return unittest.skip("buggy MSVC UCRT strptime/strftime")(test) if _buggy_ucrt else test
class PythonSymlink:
"""Creates a symlink for the current Python executable"""
def __init__(self, link=None):
from .os_helper import TESTFN
self.link = link or os.path.abspath(TESTFN)
self._linked = []
self.real = os.path.realpath(sys.executable)
self._also_link = []
self._env = None
self._platform_specific()
def _platform_specific(self):
pass
if sys.platform == "win32":
def _platform_specific(self):
import glob
import _winapi
if os.path.lexists(self.real) and not os.path.exists(self.real):
# App symlink appears to not exist, but we want the
# real executable here anyway
self.real = _winapi.GetModuleFileName(0)
dll = _winapi.GetModuleFileName(sys.dllhandle)
src_dir = os.path.dirname(dll)
dest_dir = os.path.dirname(self.link)
self._also_link.append((
dll,
os.path.join(dest_dir, os.path.basename(dll))
))
for runtime in glob.glob(os.path.join(glob.escape(src_dir), "vcruntime*.dll")):
self._also_link.append((
runtime,
os.path.join(dest_dir, os.path.basename(runtime))
))
self._env = {k.upper(): os.getenv(k) for k in os.environ}
self._env["PYTHONHOME"] = os.path.dirname(self.real)
if sysconfig.is_python_build(True):
self._env["PYTHONPATH"] = os.path.dirname(os.__file__)
def __enter__(self):
os.symlink(self.real, self.link)
self._linked.append(self.link)
for real, link in self._also_link:
os.symlink(real, link)
self._linked.append(link)
return self
def __exit__(self, exc_type, exc_value, exc_tb):
for link in self._linked:
try:
os.remove(link)
except IOError as ex:
if verbose:
print("failed to clean up {}: {}".format(link, ex))
def _call(self, python, args, env, returncode):
import subprocess
cmd = [python, *args]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, env=env)
r = p.communicate()
if p.returncode != returncode:
if verbose:
print(repr(r[0]))
print(repr(r[1]), file=sys.stderr)
raise RuntimeError(
'unexpected return code: {0} (0x{0:08X})'.format(p.returncode))
return r
def call_real(self, *args, returncode=0):
return self._call(self.real, args, None, returncode)
def call_link(self, *args, returncode=0):
return self._call(self.link, args, self._env, returncode)
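# Minimal usage sketch (illustrative only): run a command through a symlink
# to the current interpreter; call_link() applies the platform-specific
# environment prepared in _platform_specific(), and __exit__ removes links.
def _python_symlink_example():
    with PythonSymlink() as ps:
        stdout, stderr = ps.call_link('-c', 'import sys; print(sys.executable)')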
def skip_if_pgo_task(test):
"""Skip decorator for tests not run in (non-extended) PGO task"""
ok = not PGO or PGO_EXTENDED
msg = "Not run for (non-extended) PGO task"
return test if ok else unittest.skip(msg)(test)
def detect_api_mismatch(ref_api, other_api, *, ignore=()):
"""Returns the set of items in ref_api not in other_api, except for a
defined list of items to be ignored in this check.
By default this skips private attributes beginning with '_' but
includes all magic methods, i.e. those starting and ending in '__'.
"""
missing_items = set(dir(ref_api)) - set(dir(other_api))
if ignore:
missing_items -= set(ignore)
missing_items = set(m for m in missing_items
if not m.startswith('_') or m.endswith('__'))
return missing_items
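# Minimal usage sketch (illustrative only): compare the C-accelerated io
# module against its pure-Python twin _pyio; the ignore set is hypothetical.
def _detect_api_mismatch_example():
    import io
    import _pyio
    # names public on io but absent from _pyio, minus the ignored ones
    return detect_api_mismatch(io, _pyio, ignore={'__loader__', '__spec__'})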
def skip_if_restricted_mkfifo(test):
"""Skip decorator for tests that require POSIX-defined mkfifo"""
msg = "Requires POSIX-defined mkfifo implementation"
"""VxWorks mkfifo() has a restriction: the path argument specified
with the mkfifo() call must point to the path on the pipe device '/fifos'
"""
if is_vxworks:
restricted = True
else:
restricted = False
return unittest.skip(msg)(test) if restricted else test
def check__all__(test_case, module, name_of_module=None, extra=(),
not_exported=()):
"""Assert that the __all__ variable of 'module' contains all public names.
The module's public names (its API) are detected automatically based on
whether they match the public name convention and were defined in
'module'.
The 'name_of_module' argument can specify (as a string or tuple thereof)
what module(s) an API could be defined in in order to be detected as a
public API. One case for this is when 'module' imports part of its public
API from other modules, possibly a C backend (like 'csv' and its '_csv').
The 'extra' argument can be a set of names that wouldn't otherwise be
automatically detected as "public", like objects without a proper
'__module__' attribute. If provided, it will be added to the
automatically detected ones.
The 'not_exported' argument can be a set of names that must not be treated
as part of the public API even though their names indicate otherwise.
Usage:
import bar
import foo
import unittest
from test import support
class MiscTestCase(unittest.TestCase):
def test__all__(self):
support.check__all__(self, foo)
class OtherTestCase(unittest.TestCase):
def test__all__(self):
extra = {'BAR_CONST', 'FOO_CONST'}
not_exported = {'baz'} # Undocumented name.
# bar imports part of its API from _bar.
support.check__all__(self, bar, ('bar', '_bar'),
extra=extra, not_exported=not_exported)
"""
if name_of_module is None:
name_of_module = (module.__name__, )
elif isinstance(name_of_module, str):
name_of_module = (name_of_module, )
expected = set(extra)
for name in dir(module):
if name.startswith('_') or name in not_exported:
continue
obj = getattr(module, name)
if (getattr(obj, '__module__', None) in name_of_module or
(not hasattr(obj, '__module__') and
not isinstance(obj, types.ModuleType))):
expected.add(name)
test_case.assertCountEqual(module.__all__, expected)
def suppress_msvcrt_asserts(verbose=False):
try:
import msvcrt
except ImportError:
return
msvcrt.SetErrorMode(msvcrt.SEM_FAILCRITICALERRORS
| msvcrt.SEM_NOALIGNMENTFAULTEXCEPT
| msvcrt.SEM_NOGPFAULTERRORBOX
| msvcrt.SEM_NOOPENFILEERRORBOX)
# CrtSetReportMode() is only available in debug build
if hasattr(msvcrt, 'CrtSetReportMode'):
for m in [msvcrt.CRT_WARN, msvcrt.CRT_ERROR, msvcrt.CRT_ASSERT]:
if verbose:
msvcrt.CrtSetReportMode(m, msvcrt.CRTDBG_MODE_FILE)
msvcrt.CrtSetReportFile(m, msvcrt.CRTDBG_FILE_STDERR)
else:
msvcrt.CrtSetReportMode(m, 0)
class SuppressCrashReport:
"""Try to prevent a crash report from popping up.
On Windows, don't display the Windows Error Reporting dialog. On UNIX,
    disable the creation of core dump files.
"""
old_value = None
old_modes = None
def __enter__(self):
"""On Windows, disable Windows Error Reporting dialogs using
SetErrorMode() and CrtSetReportMode().
On UNIX, try to save the previous core file size limit, then set
soft limit to 0.
"""
if sys.platform.startswith('win'):
# see http://msdn.microsoft.com/en-us/library/windows/desktop/ms680621.aspx
try:
import msvcrt
except ImportError:
return
self.old_value = msvcrt.GetErrorMode()
msvcrt.SetErrorMode(self.old_value | msvcrt.SEM_NOGPFAULTERRORBOX)
# bpo-23314: Suppress assert dialogs in debug builds.
# CrtSetReportMode() is only available in debug build.
if hasattr(msvcrt, 'CrtSetReportMode'):
self.old_modes = {}
for report_type in [msvcrt.CRT_WARN,
msvcrt.CRT_ERROR,
msvcrt.CRT_ASSERT]:
old_mode = msvcrt.CrtSetReportMode(report_type,
msvcrt.CRTDBG_MODE_FILE)
old_file = msvcrt.CrtSetReportFile(report_type,
msvcrt.CRTDBG_FILE_STDERR)
self.old_modes[report_type] = old_mode, old_file
else:
try:
import resource
self.resource = resource
except ImportError:
self.resource = None
if self.resource is not None:
try:
self.old_value = self.resource.getrlimit(self.resource.RLIMIT_CORE)
self.resource.setrlimit(self.resource.RLIMIT_CORE,
(0, self.old_value[1]))
except (ValueError, OSError):
pass
if sys.platform == 'darwin':
import subprocess
# Check if the 'Crash Reporter' on OSX was configured
# in 'Developer' mode and warn that it will get triggered
# when it is.
#
# This assumes that this context manager is used in tests
# that might trigger the next manager.
cmd = ['/usr/bin/defaults', 'read',
'com.apple.CrashReporter', 'DialogType']
proc = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
with proc:
stdout = proc.communicate()[0]
if stdout.strip() == b'developer':
print("this test triggers the Crash Reporter, "
"that is intentional", end='', flush=True)
return self
def __exit__(self, *ignore_exc):
"""Restore Windows ErrorMode or core file behavior to initial value."""
if self.old_value is None:
return
if sys.platform.startswith('win'):
import msvcrt
msvcrt.SetErrorMode(self.old_value)
if self.old_modes:
for report_type, (old_mode, old_file) in self.old_modes.items():
msvcrt.CrtSetReportMode(report_type, old_mode)
msvcrt.CrtSetReportFile(report_type, old_file)
else:
if self.resource is not None:
try:
self.resource.setrlimit(self.resource.RLIMIT_CORE, self.old_value)
except (ValueError, OSError):
pass
def patch(test_instance, object_to_patch, attr_name, new_value):
"""Override 'object_to_patch'.'attr_name' with 'new_value'.
Also, add a cleanup procedure to 'test_instance' to restore
'object_to_patch' value for 'attr_name'.
The 'attr_name' should be a valid attribute for 'object_to_patch'.
"""
# check that 'attr_name' is a real attribute for 'object_to_patch'
# will raise AttributeError if it does not exist
getattr(object_to_patch, attr_name)
# keep a copy of the old value
attr_is_local = False
try:
old_value = object_to_patch.__dict__[attr_name]
except (AttributeError, KeyError):
old_value = getattr(object_to_patch, attr_name, None)
else:
attr_is_local = True
# restore the value when the test is done
def cleanup():
if attr_is_local:
setattr(object_to_patch, attr_name, old_value)
else:
delattr(object_to_patch, attr_name)
test_instance.addCleanup(cleanup)
# actually override the attribute
setattr(object_to_patch, attr_name, new_value)
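# Minimal usage sketch (illustrative only) inside a TestCase: replace
# time.sleep for the duration of one test; the addCleanup() registered by
# patch() restores the original attribute automatically afterwards.
class _PatchExample(unittest.TestCase):
    def test_patched_sleep(self):
        import time
        patch(self, time, 'sleep', lambda seconds: None)
        time.sleep(60)  # returns immediately while the patch is active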
def run_in_subinterp(code):
"""
Run code in a subinterpreter. Raise unittest.SkipTest if the tracemalloc
module is enabled.
"""
# Issue #10915, #15751: PyGILState_*() functions don't work with
    # sub-interpreters; the tracemalloc module uses these functions internally
try:
import tracemalloc
except ImportError:
pass
else:
if tracemalloc.is_tracing():
raise unittest.SkipTest("run_in_subinterp() cannot be used "
"if tracemalloc module is tracing "
"memory allocations")
import _testcapi
return _testcapi.run_in_subinterp(code)
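# Minimal usage sketch (illustrative only): the code string executes in a
# fresh sub-interpreter with its own module state; requires a build that
# ships the _testcapi extension.
def _run_in_subinterp_example():
    run_in_subinterp("x = 40 + 2  # exists only inside the sub-interpreter")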
def check_free_after_iterating(test, iter, cls, args=()):
class A(cls):
def __del__(self):
nonlocal done
done = True
try:
next(it)
except StopIteration:
pass
done = False
it = iter(A(*args))
# Issue 26494: Shouldn't crash
test.assertRaises(StopIteration, next, it)
# The sequence should be deallocated just after the end of iterating
gc_collect()
test.assertTrue(done)
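# Minimal usage sketch (illustrative only): verify that exhausting a list
# iterator drops the last reference to the underlying sequence, mirroring
# how the sequence tests invoke this helper.
class _FreeAfterIteratingExample(unittest.TestCase):
    def test_list_iterator_frees_sequence(self):
        check_free_after_iterating(self, iter, list)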
def missing_compiler_executable(cmd_names=[]):
"""Check if the compiler components used to build the interpreter exist.
Check for the existence of the compiler executables whose names are listed
in 'cmd_names' or all the compiler executables when 'cmd_names' is empty
and return the first missing executable or None when none is found
missing.
"""
from distutils import ccompiler, sysconfig, spawn, errors
compiler = ccompiler.new_compiler()
sysconfig.customize_compiler(compiler)
if compiler.compiler_type == "msvc":
# MSVC has no executables, so check whether initialization succeeds
try:
compiler.initialize()
except errors.DistutilsPlatformError:
return "msvc"
for name in compiler.executables:
if cmd_names and name not in cmd_names:
continue
cmd = getattr(compiler, name)
if cmd_names:
assert cmd is not None, \
"the '%s' executable is not configured" % name
elif not cmd:
continue
if spawn.find_executable(cmd[0]) is None:
return cmd[0]
_is_android_emulator = None
def setswitchinterval(interval):
# Setting a very low gil interval on the Android emulator causes python
# to hang (issue #26939).
minimum_interval = 1e-5
if is_android and interval < minimum_interval:
global _is_android_emulator
if _is_android_emulator is None:
import subprocess
_is_android_emulator = (subprocess.check_output(
['getprop', 'ro.kernel.qemu']).strip() == b'1')
if _is_android_emulator:
interval = minimum_interval
return sys.setswitchinterval(interval)
@contextlib.contextmanager
def disable_faulthandler():
import faulthandler
# use sys.__stderr__ instead of sys.stderr, since regrtest replaces
# sys.stderr with a StringIO which has no file descriptor when a test
# is run with -W/--verbose3.
fd = sys.__stderr__.fileno()
is_enabled = faulthandler.is_enabled()
try:
faulthandler.disable()
yield
finally:
if is_enabled:
faulthandler.enable(file=fd, all_threads=True)
class SaveSignals:
"""
Save and restore signal handlers.
This class is only able to save/restore signal handlers registered
by the Python signal module: see bpo-13285 for "external" signal
handlers.
"""
def __init__(self):
import signal
self.signal = signal
self.signals = signal.valid_signals()
# SIGKILL and SIGSTOP signals cannot be ignored nor caught
for signame in ('SIGKILL', 'SIGSTOP'):
try:
signum = getattr(signal, signame)
except AttributeError:
continue
self.signals.remove(signum)
self.handlers = {}
def save(self):
for signum in self.signals:
handler = self.signal.getsignal(signum)
if handler is None:
# getsignal() returns None if a signal handler was not
# registered by the Python signal module,
# and the handler is not SIG_DFL nor SIG_IGN.
#
# Ignore the signal: we cannot restore the handler.
continue
self.handlers[signum] = handler
def restore(self):
for signum, handler in self.handlers.items():
self.signal.signal(signum, handler)
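# Minimal usage sketch (illustrative only): snapshot handlers before a test
# installs its own, then restore them unconditionally. run_test stands in
# for arbitrary test code that may call signal.signal() freely.
def _save_signals_example(run_test):
    saved = SaveSignals()
    saved.save()
    try:
        run_test()
    finally:
        saved.restore()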
def with_pymalloc():
import _testcapi
return _testcapi.WITH_PYMALLOC
class _ALWAYS_EQ:
"""
Object that is equal to anything.
"""
def __eq__(self, other):
return True
def __ne__(self, other):
return False
ALWAYS_EQ = _ALWAYS_EQ()
class _NEVER_EQ:
"""
Object that is not equal to anything.
"""
def __eq__(self, other):
return False
def __ne__(self, other):
return True
def __hash__(self):
return 1
NEVER_EQ = _NEVER_EQ()
@functools.total_ordering
class _LARGEST:
"""
Object that is greater than anything (except itself).
"""
def __eq__(self, other):
return isinstance(other, _LARGEST)
def __lt__(self, other):
return False
LARGEST = _LARGEST()
@functools.total_ordering
class _SMALLEST:
"""
Object that is less than anything (except itself).
"""
def __eq__(self, other):
return isinstance(other, _SMALLEST)
def __gt__(self, other):
return False
SMALLEST = _SMALLEST()
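# Minimal usage sketch (illustrative only): the sentinels force the
# reflected comparison paths, which is handy when testing rich-comparison
# implementations from "the other side" of the operator.
def _comparison_sentinel_example():
    assert 123 == ALWAYS_EQ
    assert 123 != NEVER_EQ
    assert 123 < LARGEST and 123 > SMALLEST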
def maybe_get_event_loop_policy():
"""Return the global event loop policy if one is set, else return None."""
import asyncio.events
return asyncio.events._event_loop_policy
# Helpers for testing hashing.
NHASHBITS = sys.hash_info.width # number of bits in hash() result
assert NHASHBITS in (32, 64)
# Return mean and sdev of number of collisions when tossing nballs balls
# uniformly at random into nbins bins. By definition, the number of
# collisions is the number of balls minus the number of occupied bins at
# the end.
def collision_stats(nbins, nballs):
n, k = nbins, nballs
# prob a bin empty after k trials = (1 - 1/n)**k
# mean # empty is then n * (1 - 1/n)**k
# so mean # occupied is n - n * (1 - 1/n)**k
# so collisions = k - (n - n*(1 - 1/n)**k)
#
# For the variance:
# n*(n-1)*(1-2/n)**k + meanempty - meanempty**2 =
# n*(n-1)*(1-2/n)**k + meanempty * (1 - meanempty)
#
# Massive cancellation occurs, and, e.g., for a 64-bit hash code
# 1-1/2**64 rounds uselessly to 1.0. Rather than make heroic (and
# error-prone) efforts to rework the naive formulas to avoid those,
# we use the `decimal` module to get plenty of extra precision.
#
# Note: the exact values are straightforward to compute with
# rationals, but in context that's unbearably slow, requiring
# multi-million bit arithmetic.
import decimal
with decimal.localcontext() as ctx:
bits = n.bit_length() * 2 # bits in n**2
# At least that many bits will likely cancel out.
# Use that many decimal digits instead.
ctx.prec = max(bits, 30)
dn = decimal.Decimal(n)
p1empty = ((dn - 1) / dn) ** k
meanempty = n * p1empty
occupied = n - meanempty
collisions = k - occupied
var = dn*(dn-1)*((dn-2)/dn)**k + meanempty * (1 - meanempty)
return float(collisions), float(var.sqrt())
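# Hedged sanity check (illustrative only): for 100 balls in 1024 bins the
# mean collision count should sit near the birthday-problem estimate
# k*(k-1)/(2*n) = 100*99/2048, about 4.8.
def _collision_stats_example():
    mean, sdev = collision_stats(nbins=1024, nballs=100)
    assert 4.0 < mean < 5.5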
class catch_unraisable_exception:
"""
Context manager catching unraisable exception using sys.unraisablehook.
Storing the exception value (cm.unraisable.exc_value) creates a reference
cycle. The reference cycle is broken explicitly when the context manager
exits.
Storing the object (cm.unraisable.object) can resurrect it if it is set to
an object which is being finalized. Exiting the context manager clears the
stored object.
Usage:
with support.catch_unraisable_exception() as cm:
# code creating an "unraisable exception"
...
# check the unraisable exception: use cm.unraisable
...
# cm.unraisable attribute no longer exists at this point
# (to break a reference cycle)
"""
def __init__(self):
self.unraisable = None
self._old_hook = None
def _hook(self, unraisable):
# Storing unraisable.object can resurrect an object which is being
# finalized. Storing unraisable.exc_value creates a reference cycle.
self.unraisable = unraisable
def __enter__(self):
self._old_hook = sys.unraisablehook
sys.unraisablehook = self._hook
return self
def __exit__(self, *exc_info):
sys.unraisablehook = self._old_hook
del self.unraisable
def wait_process(pid, *, exitcode, timeout=None):
"""
Wait until process pid completes and check that the process exit code is
exitcode.
Raise an AssertionError if the process exit code is not equal to exitcode.
If the process runs longer than timeout seconds (SHORT_TIMEOUT by default),
kill the process (if signal.SIGKILL is available) and raise an
AssertionError. The timeout feature is not available on Windows.
"""
if os.name != "nt":
import signal
if timeout is None:
timeout = SHORT_TIMEOUT
t0 = time.monotonic()
sleep = 0.001
max_sleep = 0.1
while True:
pid2, status = os.waitpid(pid, os.WNOHANG)
if pid2 != 0:
break
# process is still running
dt = time.monotonic() - t0
            if dt > timeout:
try:
os.kill(pid, signal.SIGKILL)
os.waitpid(pid, 0)
except OSError:
# Ignore errors like ChildProcessError or PermissionError
pass
raise AssertionError(f"process {pid} is still running "
f"after {dt:.1f} seconds")
sleep = min(sleep * 2, max_sleep)
time.sleep(sleep)
else:
# Windows implementation
pid2, status = os.waitpid(pid, 0)
exitcode2 = os.waitstatus_to_exitcode(status)
if exitcode2 != exitcode:
raise AssertionError(f"process {pid} exited with code {exitcode2}, "
f"but exit code {exitcode} is expected")
# sanity check: it should not fail in practice
if pid2 != pid:
raise AssertionError(f"pid {pid2} != pid {pid}")
def skip_if_broken_multiprocessing_synchronize():
"""
Skip tests if the multiprocessing.synchronize module is missing, if there
is no available semaphore implementation, or if creating a lock raises an
OSError (on Linux only).
"""
from .import_helper import import_module
# Skip tests if the _multiprocessing extension is missing.
import_module('_multiprocessing')
# Skip tests if there is no available semaphore implementation:
# multiprocessing.synchronize requires _multiprocessing.SemLock.
synchronize = import_module('multiprocessing.synchronize')
if sys.platform == "linux":
try:
# bpo-38377: On Linux, creating a semaphore fails with OSError
# if the current user does not have the permission to create
# a file in /dev/shm/ directory.
synchronize.Lock(ctx=None)
except OSError as exc:
raise unittest.SkipTest(f"broken multiprocessing SemLock: {exc!r}")
# [dataset row end] avg_line_length 33.7095 | max_line_length 92 | alphanum_fraction 0.618268
# [dataset row start] allennlp/tests/models/coreference_resolution/coref_test.py | hexsha ebc2a756a27ca3b98792245bfc3ea9bcffce1fab | 1,635 bytes | py/Python | licenses ["Apache-2.0"] | stars: tianjianjiang/allennlp @ 0839f5c263911ec5ff04a2ebe575493c7e0436ef, 2 (2019-12-03 to 2021-03-29) | issues: dasguptar/allennlp @ 35b285585e0677b1025eac1c19b5eefe7e2a70db, null | forks: dasguptar/allennlp @ 35b285585e0677b1025eac1c19b5eefe7e2a70db, 2 (2019-12-04 to 2019-12-06)
import torch
from allennlp.common.testing import ModelTestCase
class CorefTest(ModelTestCase):
def setUp(self):
super().setUp()
self.set_up_model(
self.FIXTURES_ROOT / "coref" / "experiment.json",
self.FIXTURES_ROOT / "coref" / "coref.gold_conll",
)
def test_coref_model_can_train_save_and_load(self):
self.ensure_model_can_train_save_and_load(self.param_file)
def test_decode(self):
spans = torch.LongTensor([[1, 2], [3, 4], [3, 7], [5, 6], [14, 56], [17, 80]])
antecedent_indices = torch.LongTensor(
[
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0],
[2, 1, 0, 0, 0, 0],
[3, 2, 1, 0, 0, 0],
[4, 3, 2, 1, 0, 0],
]
)
        spans = spans.unsqueeze(0)
# Indices into ``antecedent_indices`` indicating the predicted antecedent
# index in ``top_spans``.
predicted_antecedents = torch.LongTensor([-1, 0, -1, -1, 1, 3])
predicted_antecedents = predicted_antecedents.unsqueeze(0)
output_dict = {
"top_spans": spans,
"antecedent_indices": antecedent_indices,
"predicted_antecedents": predicted_antecedents,
}
output = self.model.decode(output_dict)
clusters = output["clusters"][0]
gold1 = [(1, 2), (3, 4), (17, 80)]
gold2 = [(3, 7), (14, 56)]
assert len(clusters) == 2
assert gold1 in clusters
assert gold2 in clusters
# [dataset row end] avg_line_length 31.442308 | max_line_length 86 | alphanum_fraction 0.541896
# [dataset row start] tests/dtypes_test.py | hexsha 58bc5241d3219785674bc714c17f316ebc47a7e1 | 3,422 bytes | py/Python | licenses ["MIT"] | stars: rappie/vaex @ 7f49d3864a5c969105511a07526894c20a22d9bf, 1 (2022-01-10 to 2022-01-10) | issues: rappie/vaex, null | forks: rappie/vaex, null
from common import *
from vaex.datatype import DataType
from unittest.mock import MagicMock
def test_dtype_basics(df):
df['new_virtual_column'] = df.x + 1
for name in df.get_column_names():
if df.is_string(name):
assert df[name].to_numpy().dtype.kind in 'OSU'
else:
assert vaex.array_types.same_type(DataType(vaex.array_types.data_type(df[name].values)), df.data_type(df[name]))
def test_dtypes(df_local):
df = df_local
assert [df.dtypes[name] for name in df.get_column_names()] == [df[name].data_type() for name in df.get_column_names()]
def test_dtype_arrow():
l = pa.array([[1,2], [2,3,4]])
df = vaex.from_arrays(l=l)
assert df.data_type(df.l) == pa.list_(l.type.value_type)
def test_dtype_str():
df = vaex.from_arrays(x=["foo", "bars"], y=[1,2])
assert df.data_type(df.x) == pa.string()
assert df.data_type(df.x, array_type='arrow') == pa.string()
df['s'] = df.y.apply(lambda x: str(x))
assert df.data_type(df.x) == pa.string()
assert df.data_type(df.s) == pa.string()
assert df.data_type(df.x, array_type='arrow') == pa.string()
assert df.data_type(df.s, array_type='arrow') == pa.string()
assert df.data_type(df.x.as_arrow(), array_type=None) == pa.string()
assert df.data_type(df.x.as_arrow(), array_type='arrow') == pa.string()
assert df.data_type(df.x.as_arrow(), array_type='numpy') == object
n = np.array(['aap', 'noot'])
assert vaex.from_arrays(n=n).n.dtype == pa.string()
n = np.array([np.nan, 'aap', 'noot'], dtype=object)
df = vaex.from_arrays(n=n)
assert df.n.dtype == pa.string()
assert df.copy().n.dtype == pa.string()
n = np.array([None, 'aap', 'noot'])
df = vaex.from_arrays(n=n)
assert df.n.dtype == pa.string()
assert df.copy().n.dtype == pa.string()
def test_dtype_str_invalid_identifier():
df = vaex.from_dict({'#': ['foo']})
assert df.data_type('#') == 'string'
assert df.data_type('#', array_type='numpy') == 'object'
assert df.data_type('#', array_type='numpy-arrow') == 'string'
assert df['#'].dtype == 'string'
def test_dtype_str_virtual_column():
df = vaex.from_dict({'s': ['foo']})
df['v'] = df.s.str.lower()
assert df.data_type('v') == 'string'
assert df.data_type('v', array_type='numpy') == 'object'
assert df.data_type('v', array_type='numpy-arrow') == 'string'
assert df['v'].dtype == 'string'
def test_dtype_nested():
data = ['aap', 'noot', None], ['app', 'noot', 'mies']
df = vaex.from_arrays(s=pa.array(data))
assert df.s.dtype == pa.list_(pa.string())
assert df.s.data_type(axis=0) == pa.list_(pa.string())
assert df.s.data_type(axis=-2) == pa.list_(pa.string())
assert df.s.data_type(axis=1) == pa.string()
assert df.s.data_type(axis=-1) == pa.string()
data = [['aap', 'noot', None], ['app', 'noot', 'mies']], [], None
df = vaex.from_arrays(s=pa.array(data))
assert df.s.dtype == pa.list_(pa.list_(pa.string()))
assert df.s.data_type(axis=-3) == pa.list_(pa.list_(pa.string()))
assert df.s.data_type(axis=-2) == pa.list_(pa.string())
assert df.s.data_type(axis=-1) == pa.string()
def test_dtype_no_eval():
df = vaex.from_dict({"#": [1.1], "with space": ['should work']})
df._evaluate_implementation = MagicMock()
assert df.data_type(df['#']) == float
assert df.data_type(df['with space']) == str
# [dataset row end] avg_line_length 36.795699 | max_line_length 124 | alphanum_fraction 0.625658
# [dataset row start] examples/nc03.py | hexsha 5377c492bb526cd6f444d4ffd530116dca25af7a | 3,434 bytes | py/Python | licenses ["Apache-2.0"] | stars: vbajpai/ncclient @ c19ab546e1af4fa00f1ae162d5d3865fba49a76b, 4 (2015-10-07 to 2020-12-04) | issues: vbajpai/ncclient, null | forks: vbajpai/ncclient, 2 (2015-01-12 to 2016-10-16)
#! /usr/bin/env python
#
# Copyright 2012 Vaibhav Bajpai <contact@vaibhavbajpai.com>
# Copyright 2009 Shikhar Bhushan <shikhar@schmizz.net>
#
# Retrieve a config portion selected by an XPATH expression from the
# configuration store passed on the command line using
# get-config and write the XML configs to files.
#
# $ ./nc03.py cook "aaa/authentication/users/user[name='schoenw']"
# $ ./nc03.py yuma "interfaces/interface[name='eth0']"
import sys, os, warnings, logging, argparse
warnings.simplefilter("ignore", DeprecationWarning)
from ncclient import manager
LEVELS = {
'debug':logging.DEBUG,
'info':logging.INFO,
'warning':logging.WARNING,
'error':logging.ERROR,
'critical':logging.CRITICAL,
}
def connect(host, port, user, password, source, expression):
with manager.connect(
host=host, port=port,
username=user, password=password
) as m:
assert(":xpath" in m.server_capabilities)
c = m.get_config(source, filter=('xpath', expression)).data_xml
with open("%s.xml" % host, 'w') as f:
f.write(c)
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument(
'hostname',
action='store',
help='hostname or IP address'
)
parser.add_argument(
'expression',
action='store',
help='xpath expression'
)
parser.add_argument(
'--port',
action='store',
default=830,
type=int,
dest='port',
help='''port number [default: 830]'''
)
parser.add_argument(
'--logging',
action='store',
dest='level_name',
help='''debug/info/warning/error/critical
[default: critical]'''
)
parser.add_argument(
'--username',
action='store',
dest='username',
default=os.getenv('USER'),
help='username [default: %s]'%(os.getenv('USER'))
)
parser.add_argument(
'--password',
action='store',
dest='password',
help='password'
)
parser.add_argument(
'--source',
action='store',
default='running',
help='running/candidate/startup [default: running]',
dest='source',
)
results = parser.parse_args()
return results
if __name__ == '__main__':
def setlogging_level(level_name):
level = LEVELS.get(level_name, logging.CRITICAL)
logging.basicConfig(level=level)
results = parse_arguments()
setlogging_level(results.level_name)
connect(
results.hostname, results.port,
results.username, results.password,
results.source, results.expression
)
# [dataset row end] avg_line_length 34.686869 | max_line_length 76 | alphanum_fraction 0.481363
# [dataset row start] scripts/addons/keentools_facebuilder/utils/images.py | hexsha b090d4dced1a623fc66663a2d30035d7faa695e6 | 3,477 bytes | py/Python | licenses ["MIT"] | stars: Tilapiatsu/blender-custom_conf @ 05592fedf74e4b7075a6228b8448a5cda10f7753, 2 (2020-04-16 to 2022-01-22) | issues: Tilapiatsu/blender-custom_conf, null | forks: Tilapiatsu/blender-custom_conf, 2 (2019-05-16 to 2020-08-25)
# ##### BEGIN GPL LICENSE BLOCK #####
# KeenTools for blender is a blender addon for using KeenTools in Blender.
# Copyright (C) 2019 KeenTools
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# ##### END GPL LICENSE BLOCK #####
import logging
import numpy as np
import bpy
def find_bpy_image_by_name(image_name):
image_num = bpy.data.images.find(image_name)
if image_num >= 0:
return bpy.data.images[image_num]
return None
def remove_bpy_image(image):
if image and image in bpy.data.images:
bpy.data.images.remove(image)
def remove_bpy_image_by_name(image_name):
image = find_bpy_image_by_name(image_name)
if image is not None:
bpy.data.images.remove(image)
def store_bpy_image_in_scene(image):
image.pack()
image.use_fake_user = True
def add_alpha_channel(np_image_array):
return np.dstack((np_image_array, np.ones(np_image_array.shape[:2])))
def check_bpy_image_size(image):
if not image or not image.size:
return False
w, h = image.size[:2]
return w > 0 and h > 0
def check_bpy_image_has_same_size(image, size):
if not image or not image.size:
return False
w, h = image.size[:2]
return w == size[0] and h == size[1]
def safe_bpy_image_loading(blender_name, path):
tex = find_bpy_image_by_name(blender_name)
if tex is not None:
if check_bpy_image_size(tex):
return tex
else:
remove_bpy_image_by_name(blender_name)
try:
image = bpy.data.images.load(path)
image.name = blender_name
except Exception:
logger = logging.getLogger(__name__)
logger.error('Source texture for "{}" '
'is not found on path: {}'.format(blender_name, path))
return None
if not check_bpy_image_size(image):
return None
return image
def safe_bpy_image_in_scene_loading(blender_name, path):
logger = logging.getLogger(__name__)
tex = find_bpy_image_by_name(blender_name)
if tex is not None:
if check_bpy_image_size(tex):
return tex
else:
remove_bpy_image_by_name(blender_name)
try:
image = bpy.data.images.load(path)
except Exception:
logger.error('Source texture for "{}" '
'is not found on path: {}'.format(blender_name, path))
return None
if not check_bpy_image_size(image):
bpy.data.images.remove(image)
logger.error('Source texture "{}" '
'has wrong format on path: {}'.format(blender_name, path))
return None
tex = bpy.data.images.new(blender_name,
width=image.size[0], height=image.size[1],
alpha=True, float_buffer=False)
tex.pixels[:] = image.pixels[:]
store_bpy_image_in_scene(tex)
bpy.data.images.remove(image)
return tex
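# Minimal usage sketch (illustrative only; the datablock name and path are
# hypothetical): load a texture into a scene-packed image datablock,
# getting None back on a missing or malformed file.
def _load_texture_example():
    tex = safe_bpy_image_in_scene_loading('fb_example_texture',
                                          '/tmp/example_texture.png')
    return tex  # None signals the caller to handle the failure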
# [dataset row end] avg_line_length 31.324324 | max_line_length 79 | alphanum_fraction 0.665804
# [dataset row start] imcsdk/mometa/system/SystemIOControllerNVMe.py | hexsha 8344013282e83708d865cb97e92cb0533ce5c2ae | 7,885 bytes | py/Python | licenses ["Apache-2.0"] | stars: ecoen66/imcsdk @ b10eaa926a5ee57cea7182ae0adc8dd1c818b0ab, 31 (2016-06-14 to 2021-09-12) | issues: sthagen/imcsdk @ 1831eaecb5960ca03a8624b1579521749762b932, 109 (2016-05-25 to 2021-10-18) | forks: sthagen/imcsdk, 67 (2016-05-17 to 2022-03-24)
"""This module contains the general information for SystemIOControllerNVMe ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class SystemIOControllerNVMeConsts:
pass
class SystemIOControllerNVMe(ManagedObject):
"""This is SystemIOControllerNVMe class."""
consts = SystemIOControllerNVMeConsts()
naming_props = set(['id'])
mo_meta = {
"modular": MoMeta("SystemIOControllerNVMe", "systemIOControllerNVMe", "sioc-NVMe-[id]", VersionMeta.Version404b, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], ['equipmentSystemIOController'], ['faultInst', 'ioControllerNVMePhysicalDrive'], [None])
}
prop_meta = {
"modular": {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version404b, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"component_id": MoPropertyMeta("component_id", "componentId", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"controller_chip_temp_celsius": MoPropertyMeta("controller_chip_temp_celsius", "controllerChipTempCelsius", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"controller_status": MoPropertyMeta("controller_status", "controllerStatus", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []),
"drive_count": MoPropertyMeta("drive_count", "driveCount", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"drive_life_used": MoPropertyMeta("drive_life_used", "driveLifeUsed", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"health": MoPropertyMeta("health", "health", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"id": MoPropertyMeta("id", "id", "string", VersionMeta.Version404b, MoPropertyMeta.NAMING, None, 0, 510, None, [], []),
"led_fault_status": MoPropertyMeta("led_fault_status", "ledFaultStatus", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"link_status": MoPropertyMeta("link_status", "linkStatus", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"model": MoPropertyMeta("model", "model", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"p2p_device_id": MoPropertyMeta("p2p_device_id", "p2pDeviceId", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"p2p_vendor_id": MoPropertyMeta("p2p_vendor_id", "p2pVendorId", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"pending_firmware_version": MoPropertyMeta("pending_firmware_version", "pendingFirmwareVersion", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"percentage_total_power_on_hour": MoPropertyMeta("percentage_total_power_on_hour", "percentageTotalPowerOnHour", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"performance_level": MoPropertyMeta("performance_level", "performanceLevel", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"product_id": MoPropertyMeta("product_id", "productId", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"product_revision": MoPropertyMeta("product_revision", "productRevision", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []),
"running_firmware_version": MoPropertyMeta("running_firmware_version", "runningFirmwareVersion", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"serial": MoPropertyMeta("serial", "serial", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"shutdown_temperature": MoPropertyMeta("shutdown_temperature", "shutdownTemperature", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, 0x8, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"switch_status": MoPropertyMeta("switch_status", "switchStatus", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"temperature": MoPropertyMeta("temperature", "temperature", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"vendor": MoPropertyMeta("vendor", "vendor", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"vendor_id": MoPropertyMeta("vendor_id", "vendorId", "string", VersionMeta.Version404b, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
},
}
prop_map = {
"modular": {
"childAction": "child_action",
"componentId": "component_id",
"controllerChipTempCelsius": "controller_chip_temp_celsius",
"controllerStatus": "controller_status",
"dn": "dn",
"driveCount": "drive_count",
"driveLifeUsed": "drive_life_used",
"health": "health",
"id": "id",
"ledFaultStatus": "led_fault_status",
"linkStatus": "link_status",
"model": "model",
"p2pDeviceId": "p2p_device_id",
"p2pVendorId": "p2p_vendor_id",
"pendingFirmwareVersion": "pending_firmware_version",
"percentageTotalPowerOnHour": "percentage_total_power_on_hour",
"performanceLevel": "performance_level",
"productId": "product_id",
"productRevision": "product_revision",
"rn": "rn",
"runningFirmwareVersion": "running_firmware_version",
"serial": "serial",
"shutdownTemperature": "shutdown_temperature",
"status": "status",
"switchStatus": "switch_status",
"temperature": "temperature",
"vendor": "vendor",
"vendorId": "vendor_id",
},
}
def __init__(self, parent_mo_or_dn, id, **kwargs):
self._dirty_mask = 0
self.id = id
self.child_action = None
self.component_id = None
self.controller_chip_temp_celsius = None
self.controller_status = None
self.drive_count = None
self.drive_life_used = None
self.health = None
self.led_fault_status = None
self.link_status = None
self.model = None
self.p2p_device_id = None
self.p2p_vendor_id = None
self.pending_firmware_version = None
self.percentage_total_power_on_hour = None
self.performance_level = None
self.product_id = None
self.product_revision = None
self.running_firmware_version = None
self.serial = None
self.shutdown_temperature = None
self.status = None
self.switch_status = None
self.temperature = None
self.vendor = None
self.vendor_id = None
ManagedObject.__init__(self, "SystemIOControllerNVMe", parent_mo_or_dn, **kwargs)
# [dataset row end] avg_line_length 63.58871 | max_line_length 264 | alphanum_fraction 0.653519
# [dataset row start] manage.py | hexsha eefc7d3e0d3eb6aeca7643729e797cdf200887d7 | 1,002 bytes | py/Python | licenses ["MIT"] | stars: andreffs18/flask-template-project @ a5ed56cfc59c181d33e7147feb4fdb9a4094996e, 9 (2017-02-08 to 2021-12-15) | issues: andreffs18/flask-template-project, 10 (2016-07-25 to 2019-09-25) | forks: andreffs18/flask-template-project, 7 (2016-11-01 to 2020-02-04)
#!/usr/bin/python
# -*- coding: utf-8 -*-
from flask_script import Manager
from flask import render_template
from project import create_app
# If the "test" command is being run, use the TestConfig.
import sys
arg_dict = dict((i, v) for i, v in enumerate(sys.argv))
config = "config.TestConfig" if arg_dict.get(1, None) == "test" else None
app = create_app(config)
manager = Manager(app)
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
if __name__ == '__main__':
import project.commands as cmd
manager.add_command("worker", cmd.WorkerCommand())
manager.add_command("test", cmd.TestCommand())
manager.add_command("routes", cmd.ListRoutesCommand())
manager.add_command("create-user", cmd.CreateUserCommand())
manager.add_command("delete-user", cmd.DeleteUserCommand())
manager.add_command("create-db", cmd.CreateDBCommand())
manager.run()
__version__ = '0.2.0'
__author__ = "Andre Silva"
__email__ = "andreffs18@gmail.com"
# [dataset row end] avg_line_length 29.470588 | max_line_length 73 | alphanum_fraction 0.726547
# [dataset row start] AlphaZero_book/chap3/3-4.py | hexsha 0c2a1fea9ea548bc98dd47c001203aa7237274da | 4,623 bytes | py/Python | licenses ["Apache-2.0"] | stars/issues/forks: jisuk500/ML_learning @ 4f77eb34bd652753e63fb75fa2be5bd252232f80, all null
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 18 17:53:45 2021
@author: from the book "AI Learned by Analyzing AlphaZero"
"""
#%%
# 3-4-3 Import packages
from tensorflow.keras.datasets import cifar10
from tensorflow.keras.callbacks import LearningRateScheduler
from tensorflow.keras.layers import Activation, Add, BatchNormalization, Conv2D, Dense, Dropout, GlobalAveragePooling2D, Input
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.regularizers import l2
from tensorflow.keras.utils import to_categorical
import numpy as np
import matplotlib.pyplot as plt
# Prepare the dataset
(train_images, train_labels), (test_images, test_labels) = cifar10.load_data()
# Preprocess the dataset
train_labels = to_categorical(train_labels, 10)
test_labels = to_categorical(test_labels, 10)
# Check shapes after preprocessing
print(train_images.shape)
print(train_labels.shape)
print(test_images.shape)
print(test_labels.shape)
#%%
# 3-4-6 Using the functional API
# Build the model
# Create a convolutional layer
def conv(filters, kernel_size, stride=1):
return Conv2D(filters, kernel_size, strides=(stride,stride), padding='same',
use_bias=False, kernel_initializer='he_normal', kernel_regularizer=l2(0.0001))
# Create residual block A
def first_residual_unit(filters, strides):
def f(x):
# BN -> ReLU
x = BatchNormalization()(x)
b = Activation('relu')(x)
        # Convolutional layer
x = conv(filters // 4, 1, strides)(b)
x = BatchNormalization()(x)
x = Activation('relu')(x)
        # Convolutional layer -> BN -> ReLU
x = conv(filters // 4 , 3)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
        # Convolutional layer ->
x = conv(filters,1)(x)
        # Adjust the shortcut tensor shape
sc = conv(filters, 1, strides)(b)
# add
return Add()([x, sc])
return f
# Create residual block B
def residual_unit(filters):
def f(x):
sc = x
# -> BN -> ReLU
x = BatchNormalization()(x)
x = Activation('relu')(x)
        # Convolutional layer -> BN -> ReLU
x = conv(filters // 4, 1)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
        # Convolutional layer ->
x = conv(filters,1)(x)
# add
return Add()([x, sc])
return f
# Residual block A, then residual block B x 17
def residual_block(filters, strides, unit_size):
def f(x):
x = first_residual_unit(filters, strides)(x)
for i in range(unit_size - 1):
x = residual_unit(filters)(x)
return x
return f
# Input data shape
input = Input(shape=(32,32,3))
# Convolutional layer
x = conv(16,3)(input)
# Residual blocks x 54
x = residual_block(64,1,18)(x)
x = residual_block(128,2,18)(x)
x = residual_block(256,2,18)(x)
# BN -> ReLU
x = BatchNormalization()(x)
x = Activation('relu')(x)
# Pooling layer
x = GlobalAveragePooling2D()(x)
# Fully connected layer
output = Dense(10,activation='softmax', kernel_regularizer=l2(0.0001))(x)
# Create the model
model = Model(inputs=input, outputs = output)
# 3-4-8 Compile
# Compile the model
model.compile(loss='categorical_crossentropy',optimizer=SGD(momentum=0.9), metrics=['acc'])
#%%
# Prepare ImageDataGenerator
train_gen = ImageDataGenerator(
featurewise_center=True,featurewise_std_normalization=True,
width_shift_range = 0.125, height_shift_range=0.125,
horizontal_flip=True)
test_gen = ImageDataGenerator(featurewise_center=True, featurewise_std_normalization=True)
for data in (train_gen, test_gen):
data.fit(train_images)
# Prepare LearningRateScheduler
def step_decay(epoch):
x = 0.1
if epoch >= 80: x = 0.01
if epoch >= 120: x = 0.001
return x
lr_decay_cb = LearningRateScheduler(step_decay)
#%%
# 3-4-11 Training
batch_size = 128
history = model.fit(
train_gen.flow(
train_images,
train_labels,
batch_size=batch_size),
epochs=200,
steps_per_epoch=train_images.shape[0] // batch_size,
validation_data = test_gen.flow(
test_images,
test_labels,
batch_size = batch_size),
validation_steps = test_images.shape[0] // batch_size,
callbacks=[lr_decay_cb]
)
#%%
# 3-4-12 Save the model
model.save("3-4-resnet.h5")
#%%
# 3-4-13 Display the accuracy graph
plt.plot(history.history['acc'], label='acc')
plt.plot(history.history['val_acc'], label='val_acc')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(loc='best')
plt.show()
#%%
# 3-4-14 Evaluation
batch_size = 128
test_loss , test_acc = model.evaluate_generator(
test_gen.flow(test_images, test_labels, batch_size=batch_size),
steps=10
)
print("loss: {:.3f}\nacc: {:.3f}".format(test_loss, test_acc))
# [dataset row end] avg_line_length 22.441748 | max_line_length 126 | alphanum_fraction 0.658447
# [dataset row start] testapp/models.py | hexsha 9e1c96308539471696fc8a582bbd7ba3cef89913 | 1,504 bytes | py/Python | licenses ["MIT"] | stars/issues/forks: nazmul-pro/py-channels @ 709af1c84bdf1aac6112033863c3ed0af1a23eea, all null
# Create your models here.
from django.db import models
# from django.template.defaultfilters import slugify
# from django.contrib.auth.models import User
class AppUser(models.Model):
name = models.CharField(max_length=255)
email = models.EmailField(max_length=75)
phone = models.TextField()
password = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
# @models.permalink
# def get_absolute_url(self):
# return ('blog_post_detail', (),
# {
# 'slug': self.slug,
# })
# def save(self, *args, **kwargs):
# if not self.slug:
# self.slug = slugify(self.title)
# super(Post, self).save(*args, **kwargs)
# class Meta:
# ordering = ['created_on']
# def __unicode__(self):
# return self.title
class AppUserInfo(models.Model):
app_user = models.ForeignKey(AppUser, on_delete=models.CASCADE)
permissions = models.TextField()
avatar = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class AppUserToken(models.Model):
app_user = models.ForeignKey(AppUser, on_delete=models.CASCADE)
jwt = models.TextField()
created_on = models.DateTimeField(auto_now_add=True)
class LiveDiscussion(models.Model):
app_user = models.ForeignKey(AppUser, on_delete=models.CASCADE)
text = models.TextField()
love = models.IntegerField()
created_on = models.DateTimeField(auto_now_add=True)
# [dataset row end] avg_line_length 32.695652 | max_line_length 67 | alphanum_fraction 0.667553
# [dataset row start] setup.py | hexsha 50c5f9d9dfcbd00b548ad2ecaa90cffd98596c14 | 457 bytes | py/Python | licenses ["MIT"] | stars/issues/forks: QueoLda/django-unicorn @ 01573cd65282c467bfb0925542b180ffa9efba05, all null
from setuptools import setup, find_packages
setup(
    name="django-unicorn",
    version="0.26.0",
    description="A magical full-stack framework for Django.",
    author="Adam Hill",
    author_email="unicorn@adamghill.com",
    license="MIT",
    long_description=open("README.md").read(),
    long_description_content_type="text/markdown",
    url="https://github.com/adamghill/django-unicorn/",
    project_urls={"Homepage": "https://www.django-unicorn.com"},
    keywords=["django", "python", "javascript", "fullstack"],
    packages=find_packages(),
)
# [dataset row end] avg_line_length 30.466667 | max_line_length 62 | alphanum_fraction 0.689278
# [dataset row start] Ejercicios/Arrays.py | hexsha f75fc6f8bf9507bd6c1a1e4b893b9157cb5c30e6 | 490 bytes | py/Python | licenses ["MIT"] | stars/issues/forks: dannieldev/Fundamentos-de-Python @ 63bf92c7256b373b631cae3ae9a80a3a5071f61d, all null
lis = [2,"tres",True,["uno",10,"Hola"],2,"Miau"]
rec = lis[2]
print(lis)
print(rec)
print(lis[3][2]) #Acceder a un lista dentro de una lista
lis[0] = "zero"
lis2 = lis[1:3]#copiar datos de otro array a un nuevo array
lis3 = lis[0::2]#Va intercalando dependiendo de lo deseado
lis24 = lis[1:3]#
print(lis)
print(lis2)
print(lis3)
lis[0:3] = [4,4,5] #Intercambiar datos del arrays
print(lis)
lis5 = lis[-1] #Acede desde la ultima posicion del array
print(lis5)
# [dataset row end] avg_line_length 21.304348 | max_line_length 60 | alphanum_fraction 0.644898
# [dataset row start] alphastarmini/core/rl/rl_train_HAS.py | hexsha 9680a61cbac800f8844a5c136fedebb7cfa20d16 | 1,945 bytes | py/Python | licenses ["Apache-2.0"] | stars: liuruoze/Raw-vs-Human-in-AlphaStar @ 99acae772eb5c93000dca87b78d6acdf7699f331, 3 (2021-09-07 to 2021-09-07) | issues: liuruoze/Raw-vs-Human-in-AlphaStar, null | forks: liuruoze/Raw-vs-Human-in-AlphaStar, null
#!/usr/bin/env python
# -*- coding: utf-8 -*-
" Train for RL by interacting with the environment"
import os
USED_DEVICES = "0"
import os
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = USED_DEVICES
os.environ["CUDA_LAUNCH_BLOCKING"] = "1"
import traceback
from time import time, sleep
import threading
from pysc2.env.sc2_env import Race
from alphastarmini.core.rl import utils as U
from alphastarmini.core.rl.learner import Learner
from alphastarmini.core.rl.actor_HAS import ActorLoopHAS
# below packages are for test
from alphastarmini.core.ma.league import League
from alphastarmini.core.ma.coordinator import Coordinator
import param as P
__author__ = "Ruo-Ze Liu"
debug = False
def test(on_server=False, replay_path=None):
# model path
ACTOR_NUMS = P.actor_nums
RESTORE = False
model_name = "rl_21-08-04_11-08-04.pkl"
league = League(
initial_agents={
race: U.get_reinforcement_agent(race, restore=RESTORE, model_name=model_name)
for race in [Race.protoss]
},
main_players=1,
main_exploiters=0,
league_exploiters=0)
coordinator = Coordinator(league)
learners = []
actors = []
for idx in range(league.get_learning_players_num()):
player = league.get_learning_player(idx)
learner = Learner(player, max_time_for_training=60 * 60 * 24)
learners.append(learner)
actors.extend([ActorLoopHAS(player, coordinator, replay_path=replay_path) for _ in range(ACTOR_NUMS)])
threads = []
for l in learners:
l.start()
threads.append(l.thread)
sleep(1)
for a in actors:
a.start()
threads.append(a.thread)
sleep(1)
try:
# Wait for training to finish.
for t in threads:
t.join()
except Exception as e:
print("Exception Handled in Main, Detials of the Exception:", e)
# [dataset row end] avg_line_length 25.592105 | max_line_length 110 | alphanum_fraction 0.676607
# [dataset row start] tool/cstar_perf/tool/benchmark.py | hexsha 76718f99c0ba7dc179c5581caea5f25c339efcc4 | 31,852 bytes | py/Python | licenses ["Apache-2.0"] | stars: cooldoger/cstar_perf @ aceadd1d5a2331668647c53cca231ff9c1338eb4, 60 (2015-01-01 to 2021-11-06) | issues: cooldoger/cstar_perf, 182 (2015-01-04 to 2021-04-23) | forks: datastax/cstar_perf @ aceadd1d5a2331668647c53cca231ff9c1338eb4, 30 (2015-03-11 to 2021-01-13)
"""
Bootstrap Cassandra onto a cluster and benchmark stress.
"""
import subprocess
import tempfile
import os
import time
import datetime
from pprint import pprint
import uuid
import re
import json
import socket
import getpass
import logging
import itertools
import shutil
import distutils.util
from fabric.tasks import execute
import fabric.api as fab
import yaml
import sh
import shlex
# Import the default config first:
import fab_common as common
import fab_dse as dse
import fab_cassandra as cstar
import fab_flamegraph as flamegraph
import fab_profiler as profiler
# Then import our cluster specific config:
from cluster_config import config
logging.basicConfig()
logger = logging.getLogger('benchmark')
logger.setLevel(logging.INFO)
# Ensure stdout is not truncated when a sh.Command fails
sh.ErrorReturnCode.truncate_cap = 999999
HOME = os.getenv('HOME')
CASSANDRA_STRESS_PATH = os.path.expanduser("~/fab/stress/")
CASSANDRA_STRESS_DEFAULT = os.path.expanduser("~/fab/stress/default/tools/bin/cassandra-stress")
JAVA_HOME = os.path.expanduser("~/fab/java")
CSTAR_PERF_LOGS_DIR = os.path.join(os.path.expanduser('~'), '.cstar_perf', 'logs')
antcmd = sh.Command(os.path.join(HOME, 'fab/ant/bin/ant'))
global nodetool_path, cqlsh_path
def set_nodetool_path(path):
global nodetool_path
nodetool_path = path
def set_cqlsh_path(path):
global cqlsh_path
if path.startswith('DSE_HOME'):
path = path[path.find(' ') + 1:]
cqlsh_path = path
def get_localhost():
ip = socket.gethostbyname(socket.gethostname())
return (ip, getpass.getuser() + "@" + ip)
def get_all_hosts(env):
# the local host will not be added to the cluster unless
# it has a corresponding entry in the cluster config:
hosts = list(env['hosts'])
localhost_ip, localhost_entry = get_localhost()
if localhost_ip not in [host.split(".")[0] for host in hosts]:
# Use the local username for this host, as it may be different
# than the cluster defined 'user' parameter:
hosts += [localhost_entry]
return hosts
def is_local_node(node):
for local_info in socket.gethostbyaddr(socket.gethostname()):
if isinstance(local_info, list):
for item in local_info:
if node == item:
return True
else:
continue
else:
if node == local_info:
return True
else:
continue
return False
def _parse_yaml(yaml_file):
if isinstance(yaml_file, basestring):
yaml_file = yaml.load(yaml_file)
if yaml_file is None:
yaml_file = {}
if type(yaml_file) is not dict:
raise ValueError('Invalid yaml, was expecting a dictionary: {cass_yaml}'.format(cass_yaml=yaml_file))
return yaml_file
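# Minimal usage sketch (illustrative only): a YAML string becomes a dict,
# empty input becomes {}, and non-mapping YAML raises ValueError.
def _parse_yaml_example():
    assert _parse_yaml("concurrent_reads: 64") == {'concurrent_reads': 64}
    assert _parse_yaml("") == {}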
def bootstrap(cfg=None, destroy=False, leave_data=False, git_fetch=True):
"""Deploy and start cassandra on the cluster
cfg - the cluster configuration
destroy - whether to destroy the existing build before bootstrap
leave_data - if destroy==True, leave the Cassandra data/commitlog/etc directories intact.
git_fetch - Do a git fetch before building/running C*? (Multi-revision tests should only update on the first run to maintain revision consistency in case someone checks something in mid-operation.)
Return the gid id of the branch checked out
"""
if cfg is not None:
common.setup(cfg)
# Parse yaml
if cfg.has_key('yaml'):
cass_yaml = cfg['yaml']
common.config['yaml'] = _parse_yaml(cass_yaml)
if cfg.has_key('dse_yaml'):
dse_yaml = cfg['dse_yaml']
common.config['dse_yaml'] = _parse_yaml(dse_yaml)
if cfg.has_key('options'):
if cfg['options'] is not None:
common.config.update(cfg['options'])
del common.config['options']
# Rerun setup now that additional options have been added:
common.setup(common.config)
logger.info("### Config: ###")
pprint(common.config)
    # leave_data setting can be set in the revision
# configuration, or manually in the call to this function.
# Either is fine, but they shouldn't conflict. If they do,
# ValueError is raised.
if leave_data == True and cfg.get('leave_data', None) == False:
raise ValueError('setting for leave_data conflicts in job config and bootstrap() call')
else:
leave_data = bool(distutils.util.strtobool(str(cfg.get('leave_data', leave_data))))
# Set device readahead:
if cfg['blockdev_readahead'] is not None:
if len(cfg['block_devices']) == 0:
raise AssertionError('blockdev_readahead setting requires block_devices to be set in cluster_config.')
set_device_read_ahead(cfg['blockdev_readahead'])
# Destroy cassandra deployment and data:
if destroy:
execute(common.destroy, leave_data=leave_data)
execute(common.ensure_stopped)
else:
        # Shut down cleanly:
execute(common.stop)
execute(common.ensure_stopped)
product = dse if common.config['product'] == 'dse' else cstar
replace_existing_dse_install = bool(distutils.util.strtobool(str(cfg.get('replace_existing_dse_install', 'True'))))
# dse setup and binaries download (local)
if product == dse and replace_existing_dse_install:
dse.setup(common.config)
set_nodetool_path(os.path.join(product.get_bin_path(), 'nodetool'))
set_cqlsh_path(os.path.join(product.get_bin_path(), 'cqlsh'))
# Bootstrap C* onto the cluster nodes, as well as the localhost,
# so we have access to nodetool, stress etc
hosts = get_all_hosts(common.fab.env)
if not cfg.get('revision_override'):
with common.fab.settings(hosts=hosts):
git_ids = execute(common.bootstrap, git_fetch=git_fetch, replace_existing_dse_install=replace_existing_dse_install)
else:
# revision_override is only supported for the product cassandra
if product.name != 'cassandra':
raise ValueError("Cannot use revision_override for product: {}".format(
product.name))
git_ids = {}
default_hosts = set(hosts) - set(itertools.chain(*cfg['revision_override'].values()))
print 'default version on {default_hosts}'.format(default_hosts=default_hosts)
with common.fab.settings(hosts=default_hosts):
git_ids.update(execute(common.bootstrap, git_fetch=git_fetch))
for override_revision, hosts_to_override in cfg['revision_override'].items():
print '{revision} on {hosts_to_override}'.format(revision=override_revision, hosts_to_override=hosts_to_override)
with common.fab.settings(hosts=hosts_to_override):
git_ids.update(execute(common.bootstrap, git_fetch=git_fetch, revision_override=override_revision))
if product.name == 'cassandra':
overridden_host_versions = {}
for v, hs in cfg.get('revision_override', {}).items():
overridden_host_versions.update({h: v for h in hs})
expected_host_versions = dict({h: cfg['revision'] for h in hosts}, **overridden_host_versions)
expected_host_shas = {h: str(sh.git('--git-dir={home}/fab/cassandra.git'.format(home=HOME), 'rev-parse', v))
for (h, v) in expected_host_versions.items()}
expected_host_shas = {h: v.strip() for (h, v) in expected_host_shas.items()}
assert expected_host_shas == git_ids, 'expected: {}\ngot:{}'.format(expected_host_shas, git_ids)
execute(common.start)
time.sleep(15)
is_running = True
with fab.settings(abort_exception=SystemExit):
try:
execute(common.ensure_running, hosts=[common.config['seeds'][0]])
time.sleep(30)
except SystemExit:
is_running = False
if not is_running:
try:
retrieve_logs_and_create_tarball(job_id=_extract_job_id())
except Exception as e:
logger.warn(e)
pass
fab.abort('Cassandra is not up!')
logger.info("Started {product} on {n} nodes with git SHAs: {git_ids}".format(
product=product.name, n=len(common.fab.env['hosts']), git_ids=git_ids))
time.sleep(30)
return git_ids
def _extract_job_id():
    # stats_log is a path of the form: /home/cstar/.cstar_perf/jobs/<jobid>/stats.<jobid>.json
stats_log = common.config.get('log')
# will give us: <jobid>
return stats_log.split(os.path.sep)[-2]
def retrieve_logs_and_create_tarball(job_id):
log_dir = os.path.join(CSTAR_PERF_LOGS_DIR, job_id)
if not os.path.exists(log_dir):
os.makedirs(log_dir)
retrieve_logs(log_dir)
# Tar them for archiving:
subprocess.Popen(shlex.split('tar cfvz {id}.tar.gz {id}'.format(id=job_id)), cwd=CSTAR_PERF_LOGS_DIR).communicate()
shutil.rmtree(log_dir)
def restart():
execute(common.stop)
execute(common.ensure_stopped)
execute(common.start)
execute(common.ensure_running)
def teardown(destroy=False, leave_data=False, kill_delay=0):
if destroy:
execute(common.destroy, leave_data=leave_data, kill_delay=kill_delay)
else:
execute(common.stop)
execute(common.ensure_stopped)
class NodetoolException(Exception):
pass
def nodetool(cmd):
"""Run a nodetool command
Raises NodetoolException if we can't connect or another error occurs:
"""
cmd = "JAVA_HOME={JAVA_HOME} {nodetool_path} {cmd}".format(
JAVA_HOME=JAVA_HOME, nodetool_path=nodetool_path, cmd=cmd)
proc = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=True)
output = proc.communicate()
if proc.returncode != 0:
raise NodetoolException(output)
return output[0]
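# Usage sketch (illustrative, not part of the original module): nodetool()
# runs synchronously on the local host and raises NodetoolException on a
# non-zero exit status, so callers typically wrap it, e.g.:
#
#     try:
#         print nodetool('status')
#     except NodetoolException as e:
#         logger.error('nodetool failed: {}'.format(e))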
def bash(script, nodes=None, user=None):
"""Run a bash script on a set of nodes
script - A bash script written as a string or list.
nodes - The set of nodes to run the command on. If None, all nodes of
the cluster will be used.
user - The user to run the command as. If None, the default user specified
in the cluster configuration
"""
if type(script) in (list, tuple):
script = "\n".join(script)
if nodes is None:
nodes = common.fab.env.hosts
if user is None:
user = common.fab.env.user
with common.fab.settings(user=user, hosts=nodes):
return execute(common.bash, script)
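# Usage sketch (illustrative; 'node1'/'node2' are placeholder host names):
# scripts may be passed as a single string or as a list of lines, which are
# joined before being shipped to the nodes, e.g.:
#
#     bash(['sync', 'echo 1 > /proc/sys/vm/drop_caches'], user='root')
#     bash('df -h', nodes=['node1', 'node2'])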
def cqlsh(script, node):
"""Run a cqlsh script on a node"""
global cqlsh_path
script = script.replace('\n', ' ')
cmd = '{cqlsh_path} --no-color {host} -e "{script}"'.format(cqlsh_path=cqlsh_path, host=node, script=script)
with common.fab.settings(fab.show('warnings', 'running', 'stdout', 'stderr'), hosts=node):
return execute(fab.run, cmd)[node]
def dse_cmd(node, options):
cmd = "JAVA_HOME={java_home} {dse_cmd} {options}".format(java_home=JAVA_HOME,
dse_cmd=os.path.join(dse.get_bin_path(), 'dse'),
options=options)
with common.fab.settings(fab.show('warnings', 'running', 'stdout', 'stderr'), hosts=node, warn_only=True):
return execute(fab.run, cmd)[node]
def dsetool_cmd(nodes, options):
"""Run a dsetool command simultaneously on each node specified"""
cmd = 'JAVA_HOME={java_home} {dsetool_cmd} {options}'.format(java_home=JAVA_HOME,
dsetool_cmd=os.path.join(dse.get_bin_path(),
'dsetool'), options=options)
with common.fab.settings(fab.show('warnings', 'running', 'stdout', 'stderr'), hosts=nodes, warn_only=True):
return execute(fab.run, cmd)
def get_spark_cassandra_stress_command(script, node, master, stress_node=None):
dse_bin = os.path.join(dse.get_dse_path(), 'bin')
# see conversation on https://github.com/datastax/cstar_perf/pull/226 for why we pass SPARK_MASTER
# tl;dr on DSE 4.7.x the dse script tries to call dsetool on the spark-cassandra-stress node
# if SPARK_MASTER env var is not set and this results in a connection error trace as
# we do not start DSE on the spark-cassandra-stress node
spark_cassandra_stress_cmd_prefix = 'cd {spark_cass_stress_path}; ' \
'PATH=$PATH:{dse_bin} ' \
'JAVA_HOME={JAVA_HOME} ' \
'DSE_HOME={dse_home} ' \
'SPARK_MASTER={master} '.format(spark_cass_stress_path=get_spark_cassandra_stress_path(stress_node=stress_node),
dse_bin=dse_bin,
JAVA_HOME=JAVA_HOME,
dse_home=dse.get_dse_path(),
master=master)
spark_cass_connection_host_arg = ' --conf spark.cassandra.connection.host={node}'.format(node=node)
spark_cassandra_run_cmd = './run.sh dse {script} {master} {connection_host}'.format(script=script,
master=master,
connection_host=spark_cass_connection_host_arg)
cmd = spark_cassandra_stress_cmd_prefix + ' ' + spark_cassandra_run_cmd
return cmd
def spark_cassandra_stress(script, nodes, stress_node=None, master=None,
build_spark_cassandra_stress=True, spark_data_dir=os.path.join('/', 'var', 'lib', 'spark'),
remove_existing_spark_data=True):
node0 = nodes[0]
cmd = get_spark_cassandra_stress_command(script, node0, master, stress_node=stress_node)
dse_cluster_user = execute(fab.run, 'whoami', hosts=node0)[node0]
if build_spark_cassandra_stress:
download_and_build_spark_cassandra_stress(stress_node=stress_node)
dse.setup_spark_data_dir(spark_data_dir, nodes, make_dir=True, set_permissions=True,
remove_existing_spark_data=remove_existing_spark_data,
user=dse_cluster_user)
if stress_node and not is_local_node(stress_node):
stress_user = execute(fab.run, 'whoami', hosts=stress_node)[stress_node]
# We must create /var/lib/spark/rdd on the stress node because when we call
# org.apache.spark.SparkEnv$.createDriverEnv it tries to create the local /var/lib/spark/rdd directory
# and if this does not exist or if we do not have permissions to create it, an error is thrown
dse.setup_spark_data_dir(os.path.join(spark_data_dir, 'rdd'), stress_node,
make_dir=True, set_permissions=True, user=stress_user)
spark_cassandra_stress_execute_out = execute(fab.run, cmd, hosts=stress_node)
return {'output': spark_cassandra_stress_execute_out,
'stats': get_spark_cassandra_stress_stats(spark_cassandra_stress_execute_out[stress_node].splitlines())}
else:
temp_log = tempfile.mktemp()
# see above explanation for creating /var/lib/spark/rdd directory
dse.setup_spark_data_dir(os.path.join(spark_data_dir, 'rdd'), get_localhost()[1],
make_dir=True, set_permissions=True, user=getpass.getuser())
logger.info('Running Spark-Cassandra-Stress using {cmd}'.format(cmd=cmd))
        proc = subprocess.Popen('{cmd} | tee {temp_log}'.format(
            cmd=cmd, temp_log=temp_log), shell=True)
proc.wait()
log = open(temp_log)
log_lines = log.readlines()
log.close()
os.remove(temp_log)
return {'output': log_lines,
'stats': get_spark_cassandra_stress_stats(log_lines)}
def get_spark_cassandra_stress_stats(output_list):
stats_dict = {}
# Do not fail if we can not find the stats.
try:
for line in output_list:
line = line.strip()
if "TimeInSeconds" in line:
stats_dict['TimeInSeconds'] = line.split(':')[1]
elif 'OpsPerSecond' in line:
stats_dict['OpsPerSecond'] = line.split(':')[1]
except (AttributeError, IndexError):
pass
return stats_dict
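# Worked example (illustrative): split(':')[1] keeps the leading space, so
#
#     get_spark_cassandra_stress_stats(['TimeInSeconds: 42', 'OpsPerSecond: 1000'])
#
# returns {'TimeInSeconds': ' 42', 'OpsPerSecond': ' 1000'}; callers should
# strip() and cast the values before doing arithmetic with them.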
def get_spark_cassandra_stress_path(stress_node=None):
if stress_node:
return os.path.join(execute(fab.run, 'pwd', hosts=stress_node)[stress_node], 'fab', 'spark-cassandra-stress')
else:
return os.path.expanduser("~/fab/spark-cassandra-stress")
def download_and_build_spark_cassandra_stress(stress_node=None):
dse_home = 'DSE_HOME={dse_path}'.format(dse_path=dse.get_dse_path())
dse_resources = 'DSE_RESOURCES={dse_resources_path}'.format(dse_resources_path=os.path.join(dse.get_dse_path(), 'resources'))
spark_cassandra_stress_git = 'https://github.com/datastax/spark-cassandra-stress.git'
git_clone_spark_cass_stress_command = 'git clone -b master --single-branch ' \
'{spark_cass_stress_git} ' \
'{spark_cass_stress_path}'.format(spark_cass_stress_git=spark_cassandra_stress_git,
spark_cass_stress_path=get_spark_cassandra_stress_path(stress_node=stress_node))
build_command = './gradlew jar -Pagainst=dse;'
full_build_command = 'cd {spark_cass_stress_path}; TERM=dumb {dse_home} {dse_resources} {build_cmd}'.format(
spark_cass_stress_path=get_spark_cassandra_stress_path(),
dse_home=dse_home,
dse_resources=dse_resources,
build_cmd=build_command
)
if stress_node:
with common.fab.settings(hosts=stress_node):
execute(fab.run, 'rm -rf {spark_cass_stress_path}'.format(spark_cass_stress_path=get_spark_cassandra_stress_path(stress_node=stress_node)))
execute(fab.run, git_clone_spark_cass_stress_command)
execute(fab.run, full_build_command)
else:
shutil.rmtree(get_spark_cassandra_stress_path(), ignore_errors=True)
logger.info('Installing Spark-Cassandra-Stress from {spark_cass_stress_git}'.format(spark_cass_stress_git=spark_cassandra_stress_git))
proc = subprocess.Popen(git_clone_spark_cass_stress_command, shell=True)
proc.wait()
assert proc.returncode == 0, 'Installing Spark-Cassandra-Stress from {spark_cass_stress_git} ' \
'did not complete successfully'.format(spark_cass_stress_git=spark_cassandra_stress_git)
logger.info('Building Spark-Cassandra-Stress using {full_build_command}'.format(full_build_command=full_build_command))
proc = subprocess.Popen(full_build_command, shell=True)
proc.wait()
assert proc.returncode == 0, 'Building Spark-Cassandra-Stress using {full_build_command} ' \
'did not complete successfully'.format(full_build_command=full_build_command)
def nodetool_multi(nodes, command):
"""Run a nodetool command simultaneously on each node specified"""
with common.fab.settings(hosts=nodes):
return execute(common.multi_nodetool, command)
def wait_for_compaction(nodes=None, check_interval=30, idle_confirmations=3,
compaction_throughput=16, allowed_connection_errors=10):
"""Wait for all currently scheduled compactions to finish on all (or just specified) nodes
nodes - the nodes to check (None == all)
check_interval - the time to wait between checks
idle_confirmations - the number of checks that must show 0 compactions before we assume compactions are really done.
compaction_throughput - the default compaction_throughput_mb_per_sec setting from the cassandra.yaml
allowed_connection_errors - the number of consecutive connection errors allowed before we quit trying
returns the duration all compactions took (margin of error: check_interval * idle_confirmations)
"""
def compactionstats(nodes, check_interval):
"""Check for compactions via nodetool compactionstats"""
consecutive_connection_errors = 0
pattern = re.compile("(^|\n)pending tasks: 0")
failure_pattern = re.compile("ConnectException")
nodes = set(nodes)
while True:
results = execute(common.multi_nodetool, cmd="compactionstats")
for node, output in results.iteritems():
if pattern.search(output.strip()):
nodes.discard(node)
elif failure_pattern.search(output.strip()):
consecutive_connection_errors += 1
if consecutive_connection_errors > allowed_connection_errors:
raise NodetoolException(
"Failed to connect via nodetool {consecutive_connection_errors} times in a row.".format(
consecutive_connection_errors=consecutive_connection_errors))
if len(nodes) == 0:
break
logger.info("Waiting for compactions (compactionstats) on nodes:")
for node in nodes:
logger.info("{node} - {output}".format(node=node, output=results[node]))
time.sleep(check_interval)
assert len(nodes) == 0, ("Compactions (compactionstats) should have finished, but they didn't"
" on nodes: {nodes}. output: {results}".format(
nodes=nodes, output=results))
def tpstats(nodes, check_interval):
"""Check for compactions via nodetool tpstats"""
consecutive_connection_errors = 0
stat_exists_pattern = re.compile("^CompactionExecutor", re.MULTILINE)
        no_compactions_pattern = re.compile(r"CompactionExecutor\W*0\W*0\W*[0-9]*\W*0", re.MULTILINE)
failure_pattern = re.compile("ConnectException")
nodes = set(nodes)
while True:
results = execute(common.multi_nodetool, cmd="tpstats")
for node, output in results.iteritems():
if stat_exists_pattern.search(output):
if no_compactions_pattern.search(output):
nodes.discard(node)
elif failure_pattern.search(output.strip()):
consecutive_connection_errors += 1
else:
logger.warn("CompactionExecutor not listed in nodetool tpstats, can't check for compactions this way.")
return
if consecutive_connection_errors > allowed_connection_errors:
raise NodetoolException(
"Failed to connect via nodetool {consecutive_connection_errors} times in a row.".format(
consecutive_connection_errors=consecutive_connection_errors))
if len(nodes) == 0:
break
logger.info("Waiting for compactions (tpstats) on nodes: {nodes}".format(nodes=nodes))
time.sleep(check_interval)
assert len(nodes) == 0, ("Compactions (tpstats) should have finished, but they didn't"
" on nodes: {nodes}. output: {results}".format(
nodes=nodes, output=results))
if nodes is None:
nodes = set(common.fab.env.hosts)
else:
nodes = set(nodes)
# Disable compaction throttling to speed things up:
execute(common.multi_nodetool, cmd="setcompactionthroughput 0")
# Perform checks multiple times to ensure compactions are really done:
start = time.time()
for i in range(idle_confirmations):
compactionstats(nodes, check_interval)
tpstats(nodes, check_interval)
duration = time.time() - start
# Re-enable compaction throttling:
execute(common.multi_nodetool, cmd='setcompactionthroughput {compaction_throughput}'.format(**locals()))
logger.info("Compactions finished on all nodes. Duration of checks: {duration}".format(**locals()))
return duration
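# Usage sketch (illustrative): typically called after a load phase to let the
# cluster settle before measuring reads; the defaults restore the stock
# 16 MB/s compaction throttle once every node reports zero pending tasks.
#
#     duration = wait_for_compaction(check_interval=30, idle_confirmations=3)
#     logger.info('compactions settled in {:.0f}s'.format(duration))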
def set_device_read_ahead(read_ahead, devices=None):
"""Set device read ahead.
If devices argument is None, use the 'block_devices' setting from the cluster config."""
if devices is None:
devices = config['block_devices']
execute(common.set_device_read_ahead, read_ahead, devices)
def drop_page_cache():
"""Drop the page cache"""
if not config.get('docker', False):
bash(['sync', 'echo 3 > /proc/sys/vm/drop_caches'], user='root')
def clean_stress():
# Clean all stress builds
    stress_builds = os.listdir(CASSANDRA_STRESS_PATH)
for stress_build in stress_builds:
path = os.path.join(CASSANDRA_STRESS_PATH, stress_build)
logger.info("Removing stress build '{}'".format(path))
if os.path.islink(path):
os.unlink(path)
else:
shutil.rmtree(path)
def build_stress(stress_revision, name=None):
# Build a stress revision
try:
git_id = sh.git('--git-dir={home}/fab/cassandra.git'
.format(home=HOME), 'rev-parse', stress_revision).strip()
except sh.ErrorReturnCode:
raise AssertionError('Invalid stress_revision: {}'.format(stress_revision))
path = os.path.join(CASSANDRA_STRESS_PATH, git_id)
if not os.path.exists(path):
logger.info("Building cassandra-stress '{}' in '{}'.".format(stress_revision, path))
os.makedirs(path)
sh.tar(
sh.git("--git-dir={home}/fab/cassandra.git".format(home=HOME), "archive", git_id),
'x', '-C', path
)
antcmd('-Dbasedir={}'.format(path), '-f', '{}/build.xml'.format(path),
'realclean', 'jar', _env={"JAVA_TOOL_OPTIONS": "-Dfile.encoding=UTF8",
"JAVA_HOME": JAVA_HOME})
name = name if name else stress_revision
return {name: git_id}
def setup_stress(stress_revisions=[]):
revisions = {}
# first, build the default revision
default_stress_revision = config.get('stress_revision', 'apache/trunk')
revisions.update(build_stress(default_stress_revision, name='default'))
for stress_revision in stress_revisions:
revisions.update(build_stress(stress_revision))
return revisions
def stress(cmd, revision_tag, stress_sha, stats=None):
"""Run stress command and collect average statistics"""
# Check for compatible stress commands. This doesn't yet have full
# coverage of every option:
# Make sure that if this is a read op, that the number of threads
# was specified, otherwise stress defaults to doing multiple runs
# which is not what we want:
if cmd.strip().startswith("read") and 'threads' not in cmd:
raise AssertionError('Stress read commands must specify #/threads when used with this tool.')
stress_path = os.path.join(CASSANDRA_STRESS_PATH, stress_sha, 'tools/bin/cassandra-stress')
temp_log = tempfile.mktemp()
logger.info("Running stress from '{stress_path}' : {cmd}"
.format(stress_path=stress_path, cmd=cmd))
# Record the type of operation being performed:
operation = cmd.strip().split(" ")[0]
if stats is None:
stats = {
"id": str(uuid.uuid1()),
"command": cmd,
"intervals": [],
"test": operation,
"revision": revision_tag,
"date": datetime.datetime.now().isoformat(),
"stress_revision": stress_sha
}
# Run stress:
# Subprocess communicate() blocks, preventing us from seeing any
# realtime output, so pipe the output to a file as a workaround:
proc = subprocess.Popen('JAVA_HOME={JAVA_HOME} {CASSANDRA_STRESS} {cmd} | tee {temp_log}'
.format(JAVA_HOME=JAVA_HOME,
CASSANDRA_STRESS=stress_path,
cmd=cmd, temp_log=temp_log), shell=True)
proc.wait()
log = open(temp_log)
collecting_aggregates = False
collecting_values = False
# Regex for trunk cassandra-stress
start_of_intervals_re = re.compile('type.*total ops,.*op/s,.*pk/s')
for line in log:
line = line.strip()
if line.startswith("Results:"):
collecting_aggregates = True
continue
if not collecting_aggregates:
if start_of_intervals_re.match(line):
collecting_values = True
continue
if collecting_values:
line_parts = [l.strip() for l in line.split(',')]
# Only capture total metrics for now
if line_parts[0] == 'total':
try:
stats['intervals'].append([float(x) for x in line_parts[1:]])
                except ValueError:
                    # Skip interval lines whose metrics are not all numeric
                    pass
continue
continue
if line.startswith("END") or line.strip() == "":
continue
# Collect aggregates:
try:
stat, value = line.split(":", 1)
stats[stat.strip().lower()] = value.strip()
except ValueError:
logger.info("Unable to parse aggregate line: '{}'".format(line))
log.close()
os.remove(temp_log)
return stats
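# Usage sketch (illustrative command and memo; setup_stress() builds the
# configured stress revisions and returns a {name: git_sha} mapping):
#
#     shas = setup_stress()
#     result = stress('write n=1000000 -rate threads=50', 'trunk', shas['default'])
#     log_stats(result, memo='smoke run')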
def retrieve_logs(local_directory):
"""Retrieve each node's logs to the given local directory."""
execute(common.copy_logs, local_directory=local_directory)
def retrieve_fincore_logs(local_directory):
"""Retrieve each node's fincore logs to the given local directory."""
execute(common.copy_fincore_logs, local_directory=local_directory)
def retrieve_flamegraph(local_directory, rev_num):
"""Retrieve each node's flamegraph data and svg to the given local directory."""
execute(flamegraph.copy_flamegraph, local_directory=local_directory, rev_num=rev_num)
def retrieve_yourkit(local_directory, rev_num):
"""Retrieve each node's yourkit data to the given local directory."""
execute(profiler.copy_yourkit, local_directory=local_directory, rev_num=rev_num)
def start_fincore_capture(interval=10):
"""Start linux-fincore monitoring of Cassandra data files on each node"""
execute(common.start_fincore_capture, interval=interval)
def stop_fincore_capture():
"""Stop linux-fincore monitoring"""
execute(common.stop_fincore_capture)
def log_add_data(file, data):
"""Merge the dictionary data into the json log file root."""
with open(file) as f:
log = f.read()
log = json.loads(log)
log.update(data)
log = json.dumps(log, sort_keys=True, indent=4, separators=(', ', ': '))
with open(file, 'w') as f:
f.write(log)
def log_set_title(file, title, subtitle=''):
log_add_data(file, {'title': title, 'subtitle': subtitle})
def log_stats(stats, memo=None, file='stats.json'):
"""Log results"""
# TODO: this should go back into a cassandra store for long term
# keeping
if not os.path.exists(file) or os.path.getsize(file) == 0:
with open(file, 'w') as f:
f.write(json.dumps({'title': 'Title goes here', 'stats':[]}))
with open(file) as f:
log = f.read()
log = json.loads(log)
if memo:
stats.update({'memo': memo})
log['stats'].append(stats)
log = json.dumps(log, sort_keys=True, indent=4, separators=(', ', ': '))
with open(file, 'w') as f:
f.write(log)
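# Usage sketch (illustrative file name and titles): the helpers above keep a
# single JSON document per job, so a run usually looks like:
#
#     log_stats(stats, memo='run 1', file='stats.json')
#     log_set_title('stats.json', 'trunk vs cassandra-3.0', subtitle='write workload')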
--- file: tests/test_get_upcoming.py | repo: ludeeus/pylaunches | sha: 8f44f9b6084bb93f0429e17e94deb7af47a01a9b | license: MIT ---
import aiohttp
import pytest
from pylaunches import PyLaunches, PyLaunchesException, PyLaunchesNoData
from pylaunches.const import HEADERS
from tests.common import fixture
@pytest.mark.asyncio
async def test_upcoming_launches(aresponses):
response = fixture("upcoming.json", False)
aresponses.add(
"ll.thespacedevs.com",
"/2.0.0/launch/upcoming/",
"get",
aresponses.Response(text=response, headers=HEADERS),
)
async with PyLaunches() as client:
launches = await client.upcoming_launches()
first = launches[0]
assert first.name == "Example | Example-01"
assert isinstance(first.raw_data_contents, dict)
@pytest.mark.asyncio
async def test_upcoming_launches_exceptions(aresponses):
aresponses.add(
"ll.thespacedevs.com",
"/2.0.0/launch/upcoming/",
"get",
aresponses.Response(text="{}", headers=HEADERS),
)
aresponses.add(
"ll.thespacedevs.com",
"/2.0.0/launch/upcoming/",
"get",
aresponses.Response(text="{}", headers=HEADERS, status=500),
)
async with PyLaunches() as client:
with pytest.raises(PyLaunchesNoData):
await client.upcoming_launches()
async with PyLaunches() as client:
with pytest.raises(PyLaunchesException):
await client.upcoming_launches()
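# The two cases above can be run in isolation with, e.g.:
#     pytest tests/test_get_upcoming.py -q
# (the aresponses fixture and @pytest.mark.asyncio require the aresponses and
# pytest-asyncio plugins to be installed).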
--- file: Python/libraries/recognizers-date-time/recognizers_date_time/resources/portuguese_date_time.py | repo: XiaoxiaoMa0815/Recognizers-Text | sha: ec12f0816bf0fdd929ad122fa466581596248d4a | license: MIT ---
# ------------------------------------------------------------------------------
# <auto-generated>
# This code was generated by a tool.
# Changes to this file may cause incorrect behavior and will be lost if
# the code is regenerated.
# </auto-generated>
#
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# ------------------------------------------------------------------------------
from .base_date_time import BaseDateTime
# pylint: disable=line-too-long
class PortugueseDateTime:
LangMarker = 'Por'
CheckBothBeforeAfter = False
TillRegex = f'(?<till>\\b(at[eé]h?|[aà]s|ao?)\\b|--|-|—|——)(\\s+\\b(o|[aà](s)?)\\b)?'
RangeConnectorRegex = f'(?<and>(e\\s*(([àa]s?)|o)?)|{BaseDateTime.RangeConnectorSymbolRegex})'
DayRegex = f'(?<day>01|02|03|04|05|06|07|08|09|10|11|12|13|14|15|16|17|18|19|1|20|21|22|23|24|25|26|27|28|29|2|30|31|3|4|5|6|7|8|9)(?=\\b|t)'
MonthNumRegex = f'(?<month>01|02|03|04|05|06|07|08|09|10|11|12|1|2|3|4|5|6|7|8|9)\\b'
AmDescRegex = f'({BaseDateTime.BaseAmDescRegex})'
PmDescRegex = f'({BaseDateTime.BasePmDescRegex})'
AmPmDescRegex = f'({BaseDateTime.BaseAmPmDescRegex})'
DescRegex = f'(?<desc>({AmDescRegex}|{PmDescRegex}))'
OfPrepositionRegex = f'(\\bd(o|a|e)s?\\b)'
AfterNextSuffixRegex = f'\\b(que\\s+vem|passad[oa])\\b'
RangePrefixRegex = f'((de(sde)?|das?|entre)\\s+(a(s)?\\s+)?)'
TwoDigitYearRegex = f'\\b(?<![$])(?<year>([0-24-9]\\d))(?!(\\s*((\\:\\d)|{AmDescRegex}|{PmDescRegex}|\\.\\d)))\\b'
RelativeRegex = f'(?<rela>((est[ae]|pr[oó]xim[oa]|([uú]ltim(o|as|os)))(\\s+fina(l|is)\\s+d[eao])?)|(fina(l|is)\\s+d[eao]))\\b'
StrictRelativeRegex = f'(?<rela>((est[ae]|pr[oó]xim[oa]|([uú]ltim(o|as|os)))(\\s+fina(l|is)\\s+d[eao])?)|(fina(l|is)\\s+d[eao]))\\b'
WrittenOneToNineRegex = f'(uma?|dois|duas|tr[eê]s|quatro|cinco|seis|sete|oito|nove)'
WrittenOneHundredToNineHundredRegex = f'(duzent[oa]s|trezent[oa]s|[cq]uatrocent[ao]s|quinhent[ao]s|seiscent[ao]s|setecent[ao]s|oitocent[ao]s|novecent[ao]s|cem|(?<!por\\s+)(cento))'
WrittenOneToNinetyNineRegex = f'(((vinte|trinta|[cq]uarenta|cinquenta|sessenta|setenta|oitenta|noventa)(\\s+e\\s+{WrittenOneToNineRegex})?)|d[eé]z|onze|doze|treze|(c|qu)atorze|quinze|dez[ea]sseis|dez[ea]ssete|dez[ea]nove|dezoito|uma?|d(oi|ua)s|tr[eê]s|quatro|cinco|seis|sete|oito|nove)'
FullTextYearRegex = f'\\b(?<fullyear>((dois\\s+)?mil)((\\s+e)?\\s+{WrittenOneHundredToNineHundredRegex})?((\\s+e)?\\s+{WrittenOneToNinetyNineRegex})?)'
YearRegex = f'({BaseDateTime.FourDigitYearRegex}|{FullTextYearRegex})'
RelativeMonthRegex = f'(?<relmonth>([nd]?es[st]e|pr[óo]ximo|passsado|[uú]ltimo)\\s+m[eê]s)\\b'
MonthRegex = f'(?<month>abr(il)?|ago(sto)?|dez(embro)?|fev(ereiro)?|jan(eiro)?|ju[ln](ho)?|mar([çc]o)?|maio?|nov(embro)?|out(ubro)?|sep?t(embro)?)'
MonthSuffixRegex = f'(?<msuf>((em|no)\\s+|d[eo]\\s+)?({RelativeMonthRegex}|{MonthRegex}))'
DateUnitRegex = f'(?<unit>anos?|meses|m[êe]s|semanas?|dias?)\\b'
PastRegex = f'(?<past>\\b(passad[ao](s)?|[uú]ltim[oa](s)?|anterior(es)?|h[aá]|pr[ée]vi[oa](s)?)\\b)'
FutureRegex = f'(?<past>\\b(seguinte(s)?|pr[oó]xim[oa](s)?|dentro\\s+de|em|daqui\\s+a)\\b)'
SimpleCasesRegex = f'\\b((desde\\s+[oa]|desde|d[oa])\\s+)?(dia\\s+)?({DayRegex})\\s*{TillRegex}\\s*(o dia\\s+)?({DayRegex})\\s+{MonthSuffixRegex}((\\s+|\\s*,\\s*){YearRegex})?\\b'
MonthFrontSimpleCasesRegex = f'\\b{MonthSuffixRegex}\\s+((desde\\s+[oa]|desde|d[oa])\\s+)?(dia\\s+)?({DayRegex})\\s*{TillRegex}\\s*({DayRegex})((\\s+|\\s*,\\s*){YearRegex})?\\b'
MonthFrontBetweenRegex = f'\\b{MonthSuffixRegex}\\s+((entre|entre\\s+[oa]s?)\\s+)(dias?\\s+)?({DayRegex})\\s*{RangeConnectorRegex}\\s*({DayRegex})((\\s+|\\s*,\\s*){YearRegex})?\\b'
DayBetweenRegex = f'\\b((entre|entre\\s+[oa]s?)\\s+)(dia\\s+)?({DayRegex})\\s*{RangeConnectorRegex}\\s*({DayRegex})\\s+{MonthSuffixRegex}((\\s+|\\s*,\\s*){YearRegex})?\\b'
OneWordPeriodRegex = f'\\b(((pr[oó]xim[oa]?|[nd]?es[st]e|aquel[ea]|[uú]ltim[oa]?|em)\\s+)?(?<month>abr(il)?|ago(sto)?|dez(embro)?|fev(ereiro)?|jan(eiro)?|ju[ln](ho)?|mar([çc]o)?|maio?|nov(embro)?|out(ubro)?|sep?t(embro)?)|(?<=\\b(de|do|da|o|a)\\s+)?(pr[oó]xim[oa](s)?|[uú]ltim[oa]s?|est(e|a))\\s+(fim de semana|fins de semana|semana|m[êe]s|ano)|fim de semana|fins de semana|(m[êe]s|anos)? [àa] data)\\b'
MonthWithYearRegex = f'\\b(((pr[oó]xim[oa](s)?|[nd]?es[st]e|aquele|[uú]ltim[oa]?|em)\\s+)?(?<month>abr(il)?|ago(sto)?|dez(embro)?|fev(ereiro)?|jan(eiro)?|ju[ln](ho)?|mar([çc]o)?|maio?|nov(embro)?|out(ubro)?|sep?t(embro)?)\\s+((de|do|da|o|a)\\s+)?({YearRegex}|{TwoDigitYearRegex}|(?<order>pr[oó]ximo(s)?|[uú]ltimo?|[nd]?es[st]e)\\s+ano))\\b'
MonthNumWithYearRegex = f'({YearRegex}(\\s*?)[/\\-\\.](\\s*?){MonthNumRegex})|({MonthNumRegex}(\\s*?)[/\\-](\\s*?){YearRegex})'
WeekOfMonthRegex = f'(?<wom>(a|na\\s+)?(?<cardinal>primeira?|1a|segunda|2a|terceira|3a|[qc]uarta|4a|quinta|5a|[uú]ltima)\\s+semana\\s+{MonthSuffixRegex})'
WeekOfYearRegex = f'(?<woy>(a|na\\s+)?(?<cardinal>primeira?|1a|segunda|2a|terceira|3a|[qc]uarta|4a|quinta|5a|[uú]ltima?)\\s+semana(\\s+d[oe]?)?\\s+({YearRegex}|(?<order>pr[oó]ximo|[uú]ltimo|[nd]?es[st]e)\\s+ano))'
FollowedDateUnit = f'^\\s*{DateUnitRegex}'
NumberCombinedWithDateUnit = f'\\b(?<num>\\d+(\\.\\d*)?){DateUnitRegex}'
QuarterRegex = f'(n?o\\s+)?(?<cardinal>primeiro|1[oº]|segundo|2[oº]|terceiro|3[oº]|[qc]uarto|4[oº])\\s+trimestre(\\s+d[oe]|\\s*,\\s*)?\\s+({YearRegex}|(?<order>pr[oó]ximo(s)?|[uú]ltimo?|[nd]?es[st]e)\\s+ano)'
QuarterRegexYearFront = f'({YearRegex}|(?<order>pr[oó]ximo(s)?|[uú]ltimo?|[nd]?es[st]e)\\s+ano)\\s+(n?o\\s+)?(?<cardinal>(primeiro)|1[oº]|segundo|2[oº]|terceiro|3[oº]|[qc]uarto|4[oº])\\s+trimestre'
AllHalfYearRegex = f'^[.]'
PrefixDayRegex = f'^[.]'
SeasonRegex = f'\\b(?<season>(([uú]ltim[oa]|[nd]?es[st][ea]|n?[oa]|(pr[oó]xim[oa]s?|seguinte))\\s+)?(?<seas>primavera|ver[ãa]o|outono|inverno)((\\s+)?(seguinte|((de\\s+|,)?\\s*{YearRegex})|((do\\s+)?(?<order>pr[oó]ximo|[uú]ltimo|[nd]?es[st]e)\\s+ano)))?)\\b'
WhichWeekRegex = f'\\b(semana)(\\s*)(?<number>5[0-3]|[1-4]\\d|0?[1-9])\\b'
WeekOfRegex = f'(semana)(\\s*)((do|da|de))'
MonthOfRegex = f'(mes)(\\s*)((do|da|de))'
RangeUnitRegex = f'\\b(?<unit>anos?|meses|m[êe]s|semanas?)\\b'
BeforeAfterRegex = f'^[.]'
InConnectorRegex = f'\\b(em)\\b'
SinceYearSuffixRegex = f'^[.]'
WithinNextPrefixRegex = f'^[.]'
TodayNowRegex = f'\\b(hoje|agora)\\b'
CenturySuffixRegex = f'^[.]'
FromRegex = f'((desde|de)(\\s*a(s)?)?)$'
BetweenRegex = f'(entre\\s*([oa](s)?)?)'
WeekDayRegex = f'\\b(?<weekday>(domingos?|(segunda|ter[çc]a|quarta|quinta|sexta)s?([-\\s+]feiras?)?|s[aá]bados?|(2|3|4|5|6)[aª])\\b|(dom|seg|ter[cç]|qua|qui|sex|sab)\\b(\\.?(?=\\s|,|;|$)))'
OnRegex = f'(?<=\\b(em|no)\\s+)({DayRegex}s?)\\b'
RelaxedOnRegex = f'(?<=\\b(em|n[oa]|d[oa])\\s+)(dia\\s+)?((?<day>10|11|12|13|14|15|16|17|18|19|1|20|21|22|23|24|25|26|27|28|29|2|30|31|3|4|5|6|7|8|9)s?)\\b'
ThisRegex = f'\\b(([nd]?es[st][ea]\\s*){WeekDayRegex})|({WeekDayRegex}\\s*([nd]?es[st]a\\s+semana))\\b'
LastDateRegex = f'\\b(([uú]ltim[ao])\\s*{WeekDayRegex})|({WeekDayRegex}(\\s+(([nd]?es[st]a|na|da)\\s+([uú]ltima\\s+)?semana)))\\b'
NextDateRegex = f'\\b(((pr[oó]xim[oa]|seguinte)\\s*){WeekDayRegex})|({WeekDayRegex}((\\s+(pr[oó]xim[oa]|seguinte))|(\\s+(da\\s+)?(semana\\s+seguinte|pr[oó]xima\\s+semana))))\\b'
SpecialDayRegex = f'\\b((d?o\\s+)?(dia\\s+antes\\s+de\\s+ontem|antes\\s+de\\s+ontem|anteontem)|((d?o\\s+)?(dia\\s+|depois\\s+|dia\\s+depois\\s+)?de\\s+amanh[aã])|(o\\s)?dia\\s+seguinte|(o\\s)?pr[oó]ximo\\s+dia|(o\\s+)?[uú]ltimo\\s+dia|ontem|amanh[ãa]|hoje)|(do\\s+dia$)\\b'
SpecialDayWithNumRegex = f'^[.]'
ForTheRegex = f'.^'
WeekDayAndDayOfMonthRegex = f'.^'
WeekDayAndDayRegex = f'.^'
WeekDayOfMonthRegex = f'(?<wom>(n?[ao]\\s+)?(?<cardinal>primeir[ao]|1[ao]|segund[ao]|2[ao]|terceir[ao]|3[ao]|[qc]uart[ao]|4[ao]|quint[ao]|5[ao]|[uú]ltim[ao])\\s+{WeekDayRegex}\\s+{MonthSuffixRegex})'
RelativeWeekDayRegex = f'^[.]'
AmbiguousRangeModifierPrefix = f'^[.]'
NumberEndingPattern = f'^[.]'
SpecialDateRegex = f'(?<=\\bno\\s+){DayRegex}\\b'
OfMonthRegex = f'^\\s*de\\s*{MonthSuffixRegex}'
MonthEndRegex = f'({MonthRegex}\\s*(o)?\\s*$)'
WeekDayEnd = f'{WeekDayRegex}\\s*,?\\s*$'
WeekDayStart = f'^[\\.]'
DateYearRegex = f'(?<year>{YearRegex}|{TwoDigitYearRegex})'
DateExtractor1 = f'\\b({WeekDayRegex}(\\s+|\\s*,\\s*))?{DayRegex}?((\\s*(de)|[/\\\\\\.\\-])\\s*)?{MonthRegex}\\b'
DateExtractor2 = f'\\b({WeekDayRegex}(\\s+|\\s*,\\s*))?{DayRegex}\\s*([\\.\\-]|de)?\\s*{MonthRegex}(\\s*(,|de)\\s*){DateYearRegex}\\b'
DateExtractor3 = f'\\b({WeekDayRegex}(\\s+|\\s*,\\s*))?{DayRegex}(\\s+|\\s*,\\s*|\\s+de\\s+|\\s*-\\s*){MonthRegex}((\\s+|\\s*(,|de)\\s*){DateYearRegex})?\\b'
DateExtractor4 = f'\\b{MonthNumRegex}\\s*[/\\\\\\-]\\s*{DayRegex}\\s*[/\\\\\\-]\\s*{DateYearRegex}(?!\\s*[/\\\\\\-\\.]\\s*\\d+)'
DateExtractor5 = f'\\b{DayRegex}\\s*[/\\\\\\-\\.]\\s*({MonthNumRegex}|{MonthRegex})\\s*[/\\\\\\-\\.]\\s*{DateYearRegex}(?!\\s*[/\\\\\\-\\.]\\s*\\d+)'
DateExtractor6 = f'(?<=\\b(em|no|o)\\s+){MonthNumRegex}[\\-\\.]{DayRegex}\\b'
DateExtractor7 = f'\\b{MonthNumRegex}\\s*/\\s*{DayRegex}((\\s+|\\s*(,|de)\\s*){DateYearRegex})?\\b'
DateExtractor8 = f'(?<=\\b(em|no|o)\\s+){DayRegex}[\\\\\\-]{MonthNumRegex}\\b'
DateExtractor9 = f'\\b{DayRegex}\\s*/\\s*{MonthNumRegex}((\\s+|\\s*(,|de)\\s*){DateYearRegex})?\\b'
DateExtractor10 = f'\\b{YearRegex}\\s*[/\\\\\\-\\.]\\s*{MonthNumRegex}\\s*[/\\\\\\-\\.]\\s*{DayRegex}(?!\\s*[/\\\\\\-\\.]\\s*\\d+)'
DateExtractor11 = f'(?<=\\b(dia)\\s+){DayRegex}'
HourNumRegex = f'\\b(?<hournum>zero|uma|duas|tr[êe]s|[qc]uatro|cinco|seis|sete|oito|nove|dez|onze|doze)\\b'
MinuteNumRegex = f'(?<minnum>um|dois|tr[êe]s|[qc]uatro|cinco|seis|sete|oito|nove|dez|onze|doze|treze|catorze|quatorze|quinze|dez[ea]sseis|dez[ea]sete|dezoito|dez[ea]nove|vinte|trinta|[qc]uarenta|cin[qc]uenta)'
DeltaMinuteNumRegex = f'(?<deltaminnum>um|dois|tr[êe]s|[qc]uatro|cinco|seis|sete|oito|nove|dez|onze|doze|treze|catorze|quatorze|quinze|dez[ea]sseis|dez[ea]sete|dezoito|dez[ea]nove|vinte|trinta|[qc]uarenta|cin[qc]uenta)'
OclockRegex = f'(?<oclock>em\\s+ponto)'
PmRegex = f'(?<pm>((pela|de|da|\\b[àa]\\b|na)\\s+(tarde|noite)))|((depois\\s+do|ap[óo]s\\s+o)\\s+(almo[çc]o|meio dia|meio-dia))'
AmRegex = f'(?<am>(pela|de|da|na)\\s+(manh[ãa]|madrugada))'
AmTimeRegex = f'(?<am>([dn]?es[st]a|(pela|de|da|na))\\s+(manh[ãa]|madrugada))'
PmTimeRegex = f'(?<pm>(([dn]?es[st]a|\\b[àa]\\b|(pela|de|da|na))\\s+(tarde|noite)))|((depois\\s+do|ap[óo]s\\s+o)\\s+(almo[çc]o|meio dia|meio-dia))'
LessThanOneHour = f'(?<lth>((\\s+e\\s+)?(quinze|(um\\s+|dois\\s+|tr[êes]\\s+)?quartos?)|quinze|(\\s*)(um\\s+|dois\\s+|tr[êes]\\s+)?quartos?|(\\s+e\\s+)(meia|trinta)|{BaseDateTime.DeltaMinuteRegex}(\\s+(minuto|minutos|min|mins))|{DeltaMinuteNumRegex}(\\s+(minuto|minutos|min|mins))))'
TensTimeRegex = f'(?<tens>dez|vinte|trinta|[qc]uarenta|cin[qc]uenta)'
WrittenTimeRegex = f'(?<writtentime>({HourNumRegex}\\s*((e|menos)\\s+)?({MinuteNumRegex}|({TensTimeRegex}((\\s*e\\s+)?{MinuteNumRegex}))))|(({MinuteNumRegex}|({TensTimeRegex}((\\s*e\\s+)?{MinuteNumRegex})?))\\s*((para as|pras|antes da|antes das)\\s+)?({HourNumRegex}|{BaseDateTime.HourRegex})))'
TimePrefix = f'(?<prefix>{LessThanOneHour}(\\s+(passad[ao]s)\\s+(as)?|\\s+depois\\s+(das?|do)|\\s+pras?|\\s+(para|antes)?\\s+([àa]s?))?)'
TimeSuffix = f'(?<suffix>({LessThanOneHour}\\s+)?({AmRegex}|{PmRegex}|{OclockRegex}))'
BasicTime = f'(?<basictime>{WrittenTimeRegex}|{HourNumRegex}|{BaseDateTime.HourRegex}:{BaseDateTime.MinuteRegex}(:{BaseDateTime.SecondRegex})?|{BaseDateTime.HourRegex})'
AtRegex = f'\\b((?<=\\b([aà]s?)\\s+)({WrittenTimeRegex}|{HourNumRegex}|{BaseDateTime.HourRegex})(\\s+horas?|\\s*h\\b)?|(?<=\\b(s(er)?[aã]o|v[aã]o\\s+ser|^[eé]h?)\\s+)({WrittenTimeRegex}|{HourNumRegex}|{BaseDateTime.HourRegex})(\\s+horas?|\\s*h\\b))(\\s+{OclockRegex})?\\b'
ConnectNumRegex = f'({BaseDateTime.HourRegex}(?<min>00|01|02|03|04|05|06|07|08|09|10|11|12|13|14|15|16|17|18|19|20|21|22|23|24|25|26|27|28|29|30|31|32|33|34|35|36|37|38|39|40|41|42|43|44|45|46|47|48|49|50|51|52|53|54|55|56|57|58|59)\\s*{DescRegex})'
TimeRegex1 = f'(\\b{TimePrefix}\\s+)?({WrittenTimeRegex}|{HourNumRegex}|{BaseDateTime.HourRegex})\\s*({DescRegex})'
TimeRegex2 = f'(\\b{TimePrefix}\\s+)?(t)?{BaseDateTime.HourRegex}(\\s*)?:(\\s*)?{BaseDateTime.MinuteRegex}((\\s*)?:(\\s*)?{BaseDateTime.SecondRegex})?((\\s*{DescRegex})|\\b)'
TimeRegex3 = f'(\\b{TimePrefix}\\s+)?{BaseDateTime.HourRegex}\\.{BaseDateTime.MinuteRegex}(\\s*{DescRegex})'
TimeRegex4 = f'\\b(({DescRegex}?)|({BasicTime}?)({DescRegex}?))({TimePrefix}\\s*)({HourNumRegex}|{BaseDateTime.HourRegex})?(\\s+{TensTimeRegex}(\\s+e\\s+)?{MinuteNumRegex}?)?({OclockRegex})?\\b'
TimeRegex5 = f'\\b({TimePrefix}|{BasicTime}{TimePrefix})\\s+(\\s*{DescRegex})?{BasicTime}?\\s*{TimeSuffix}\\b'
TimeRegex6 = f'({BasicTime}(\\s*{DescRegex})?\\s+{TimeSuffix}\\b)'
TimeRegex7 = f'\\b{TimeSuffix}\\s+[àa]s?\\s+{BasicTime}((\\s*{DescRegex})|\\b)'
TimeRegex8 = f'\\b{TimeSuffix}\\s+{BasicTime}((\\s*{DescRegex})|\\b)'
TimeRegex9 = f'\\b(?<writtentime>{HourNumRegex}\\s+({TensTimeRegex}\\s*)(e\\s+)?{MinuteNumRegex}?)\\b'
TimeRegex10 = f'(\\b([àa]|ao?)|na|de|da|pela)\\s+(madrugada|manh[ãa]|meio\\s*dia|meia\\s*noite|tarde|noite)'
TimeRegex11 = f'\\b({WrittenTimeRegex})(\\s+{DescRegex})?\\b'
TimeRegex12 = f'(\\b{TimePrefix}\\s+)?{BaseDateTime.HourRegex}(\\s*h\\s*){BaseDateTime.MinuteRegex}(\\s*{DescRegex})?'
PrepositionRegex = f'(?<prep>([àa]s?|em|por|pelo|pela|no|na|de|d[oa]?)?$)'
NowRegex = f'\\b(?<now>((logo|exatamente)\\s+)?agora(\\s+mesmo)?|neste\\s+momento|(assim\\s+que|t[ãa]o\\s+cedo\\s+quanto)\\s+(poss[ií]vel|possas?|possamos)|o\\s+mais\\s+(cedo|r[aá]pido)\\s+poss[íi]vel|recentemente|previamente)\\b'
SuffixRegex = f'^\\s*((e|a|em|por|pelo|pela|no|na|de)\\s+)?(manh[ãa]|madrugada|meio\\s*dia|tarde|noite)\\b'
TimeOfDayRegex = f'\\b(?<timeOfDay>manh[ãa]|madrugada|tarde|noite|((depois\\s+do|ap[óo]s\\s+o)\\s+(almo[çc]o|meio dia|meio-dia)))\\b'
SpecificTimeOfDayRegex = f'\\b(((((a)?\\s+|[nd]?es[st]a|seguinte|pr[oó]xim[oa]|[uú]ltim[oa])\\s+)?{TimeOfDayRegex}))\\b'
TimeOfTodayAfterRegex = f'^\\s*(,\\s*)?([àa]|em|por|pelo|pela|de|no|na?\\s+)?{SpecificTimeOfDayRegex}'
TimeOfTodayBeforeRegex = f'({SpecificTimeOfDayRegex}(\\s*,)?(\\s+(a\\s+la(s)?|para))?\\s*)'
SimpleTimeOfTodayAfterRegex = f'({HourNumRegex}|{BaseDateTime.HourRegex})\\s*(,\\s*)?((en|de(l)?)?\\s+)?{SpecificTimeOfDayRegex}'
SimpleTimeOfTodayBeforeRegex = f'({SpecificTimeOfDayRegex}(\\s*,)?(\\s+(a\\s+la|para))?\\s*({HourNumRegex}|{BaseDateTime.HourRegex}))'
SpecificEndOfRegex = f'((no|ao)\\s+)?(fi(m|nal)|t[ée]rmin(o|ar))(\\s+d?o(\\s+dia)?(\\s+de)?)?\\s*$'
UnspecificEndOfRegex = f'^[.]'
UnspecificEndOfRangeRegex = f'^[.]'
UnitRegex = f'(?<unit>anos|ano|meses|m[êe]s|semanas|semana|dias|dia|horas|hora|h|hr|hrs|hs|minutos|minuto|mins|min|segundos|segundo|segs|seg)\\b'
ConnectorRegex = f'^(,|t|para [ao]|para as|pras|cerca de|cerca das|perto de|perto das|quase)$'
TimeHourNumRegex = f'(?<hour>vinte e um|vinte e dois|vinte e tr[êe]s|vinte e quatro|zero|um|uma|dois|duas|tr[êe]s|quatro|cinco|seis|sete|oito|nove|dez|onze|doze|treze|quatorze|catorze|quinze|dez[ea]sseis|dez[ea]ssete|dezoito|dez[ea]nove|vinte)'
PureNumFromTo = f'((desde|de|da|das)\\s+(a(s)?\\s+)?)?({BaseDateTime.HourRegex}|{TimeHourNumRegex})(\\s*(?<leftDesc>{DescRegex}))?\\s*{TillRegex}\\s*({BaseDateTime.HourRegex}|{TimeHourNumRegex})\\s*(?<rightDesc>{PmRegex}|{AmRegex}|{DescRegex})?'
PureNumBetweenAnd = f'(entre\\s+((a|as)?\\s+)?)({BaseDateTime.HourRegex}|{TimeHourNumRegex})(\\s*(?<leftDesc>{DescRegex}))?\\s*e\\s*(a(s)?\\s+)?({BaseDateTime.HourRegex}|{TimeHourNumRegex})\\s*(?<rightDesc>{PmRegex}|{AmRegex}|{DescRegex})?'
SpecificTimeFromTo = f'^[.]'
SpecificTimeBetweenAnd = f'^[.]'
TimeUnitRegex = f'(?<unit>horas|hora|h|minutos|minuto|mins|min|segundos|segundo|secs|sec)\\b'
TimeFollowedUnit = f'^\\s*{TimeUnitRegex}'
TimeNumberCombinedWithUnit = f'\\b(?<num>\\d+(\\,\\d*)?)\\s*{TimeUnitRegex}'
DateTimePeriodNumberCombinedWithUnit = f'\\b(?<num>\\d+(\\.\\d*)?)\\s*{TimeUnitRegex}'
PeriodTimeOfDayWithDateRegex = f'\\b((e|[àa]|em|na|no|ao|pel[ao]|de)\\s+)?(?<timeOfDay>manh[ãa]|madrugada|(passado\\s+(o\\s+)?)?meio\\s+dia|tarde|noite)\\b'
RelativeTimeUnitRegex = f'({PastRegex}|{FutureRegex})\\s+{UnitRegex}|{UnitRegex}\\s+({PastRegex}|{FutureRegex})'
SuffixAndRegex = f'(?<suffix>\\s*(e)\\s+(?<suffix_num>meia|(um\\s+)?quarto))'
FollowedUnit = f'^\\s*{UnitRegex}'
LessThanRegex = f'^[.]'
MoreThanRegex = f'^[.]'
DurationNumberCombinedWithUnit = f'\\b(?<num>\\d+(\\,\\d*)?){UnitRegex}'
AnUnitRegex = f'\\b(um(a)?)\\s+{UnitRegex}'
DuringRegex = f'^[.]'
AllRegex = f'\\b(?<all>tod[oa]?\\s+(o|a)\\s+(?<unit>ano|m[êe]s|semana|dia))\\b'
HalfRegex = f'\\b(?<half>mei[oa]\\s+(?<unit>ano|m[êe]s|semana|dia|hora))\\b'
ConjunctionRegex = f'^[.]'
InexactNumberRegex = f'\\b(poucos|pouco|algum|alguns|v[áa]rios)\\b'
InexactNumberUnitRegex = f'\\b(poucos|pouco|algum|alguns|v[áa]rios)\\s+{UnitRegex}'
HolidayRegex1 = f'\\b(?<holiday>sexta-feira santa|sexta-feira da paix[ãa]o|quarta-feira de cinzas|carnaval|dia (de|de los) presidentes?|ano novo chin[eê]s|ano novo|v[ée]spera de ano novo|natal|v[ée]spera de natal|dia de a[cç][ãa]o de gra[çc]as|a[cç][ãa]o de gra[çc]as|yuandan|halloween|dia das bruxas|p[áa]scoa)(\\s+(d[eo]?\\s+)?({YearRegex}|(?<order>(pr[oó]xim[oa]?|[nd]?es[st][ea]|[uú]ltim[oa]?|em))\\s+ano))?\\b'
HolidayRegex2 = f'\\b(?<holiday>(dia\\s+(d[eoa]s?\\s+)?)?(martin luther king|todos os santos|s[ãa]o (patr[íi]cio|francisco|jorge|jo[ãa]o)|independ[êe]ncia))(\\s+(d[eo]?\\s+)?({YearRegex}|(?<order>(pr[oó]xim[oa]?|[nd]?es[st][ea]|[uú]ltim[oa]?|em))\\s+ano))?\\b'
HolidayRegex3 = f'\\b(?<holiday>(dia\\s+d[eoa]s?\\s+)(trabalh(o|ador(es)?)|m[ãa]es?|pais?|mulher(es)?|crian[çc]as?|marmota|professor(es)?))(\\s+(d[eo]?\\s+)?({YearRegex}|(?<order>(pr[oó]xim[oa]?|[nd]?es[st][ea]|[uú]ltim[oa]?|em))\\s+ano))?\\b'
BeforeRegex = f'(antes(\\s+(de|dos?|das?)?)?)'
AfterRegex = f'((depois|ap[óo]s)(\\s*(de|d?os?|d?as?)?)?)'
SinceRegex = f'(desde(\\s+(as?|o))?)'
AroundRegex = f'^[.]'
PeriodicRegex = f'\\b(?<periodic>di[áa]ri[ao]|diariamente|mensalmente|semanalmente|quinzenalmente|anualmente)\\b'
EachExpression = f'cada|tod[oa]s?\\s*([oa]s)?'
EachUnitRegex = f'(?<each>({EachExpression})\\s*{UnitRegex})'
EachPrefixRegex = f'(?<each>({EachExpression})\\s*$)'
EachDayRegex = f'\\s*({EachExpression})\\s*dias\\s*\\b'
BeforeEachDayRegex = f'({EachExpression})\\s*dias(\\s+(as|ao))?\\s*\\b'
SetEachRegex = f'(?<each>({EachExpression})\\s*)'
LaterEarlyPeriodRegex = f'^[.]'
WeekWithWeekDayRangeRegex = f'^[.]'
GeneralEndingRegex = f'^[.]'
MiddlePauseRegex = f'^[.]'
PrefixArticleRegex = f'^[\\.]'
OrRegex = f'^[.]'
YearPlusNumberRegex = f'^[.]'
NumberAsTimeRegex = f'^[.]'
TimeBeforeAfterRegex = f'^[.]'
DateNumberConnectorRegex = f'^[.]'
ComplexDatePeriodRegex = f'^[.]'
AgoRegex = f'\\b(antes|atr[áa]s|no passado)\\b'
LaterRegex = f'\\b(depois d[eoa]s?|ap[óo]s (as)?|desde (as|o)|desde|no futuro|mais tarde)\\b'
Tomorrow = 'amanh[ãa]'
UnitMap = dict([("anos", "Y"),
("ano", "Y"),
("meses", "MON"),
("mes", "MON"),
("mês", "MON"),
("semanas", "W"),
("semana", "W"),
("dias", "D"),
("dia", "D"),
("horas", "H"),
("hora", "H"),
("hrs", "H"),
("hr", "H"),
("h", "H"),
("minutos", "M"),
("minuto", "M"),
("mins", "M"),
("min", "M"),
("segundos", "S"),
("segundo", "S"),
("segs", "S"),
("seg", "S")])
UnitValueMap = dict([("anos", 31536000),
("ano", 31536000),
("meses", 2592000),
("mes", 2592000),
("mês", 2592000),
("semanas", 604800),
("semana", 604800),
("dias", 86400),
("dia", 86400),
("horas", 3600),
("hora", 3600),
("hrs", 3600),
("hr", 3600),
("h", 3600),
("minutos", 60),
("minuto", 60),
("mins", 60),
("min", 60),
("segundos", 1),
("segundo", 1),
("segs", 1),
("seg", 1)])
SpecialYearPrefixesMap = dict([("", "")])
SeasonMap = dict([("primavera", "SP"),
("verao", "SU"),
("verão", "SU"),
("outono", "FA"),
("inverno", "WI")])
SeasonValueMap = dict([("SP", 3),
("SU", 6),
("FA", 9),
("WI", 12)])
CardinalMap = dict([("primeiro", 1),
("primeira", 1),
("1o", 1),
("1a", 1),
("segundo", 2),
("segunda", 2),
("2o", 2),
("2a", 2),
("terceiro", 3),
("terceira", 3),
("3o", 3),
("3a", 3),
("cuarto", 4),
("quarto", 4),
("cuarta", 4),
("quarta", 4),
("4o", 4),
("4a", 4),
("quinto", 5),
("quinta", 5),
("5o", 5),
("5a", 5)])
DayOfWeek = dict([("segunda-feira", 1),
("segundas-feiras", 1),
("segunda feira", 1),
("segundas feiras", 1),
("segunda", 1),
("segundas", 1),
("terça-feira", 2),
("terças-feiras", 2),
("terça feira", 2),
("terças feiras", 2),
("terça", 2),
("terças", 2),
("terca-feira", 2),
("tercas-feiras", 2),
("terca feira", 2),
("tercas feiras", 2),
("terca", 2),
("tercas", 2),
("quarta-feira", 3),
("quartas-feiras", 3),
("quarta feira", 3),
("quartas feiras", 3),
("quarta", 3),
("quartas", 3),
("quinta-feira", 4),
("quintas-feiras", 4),
("quinta feira", 4),
("quintas feiras", 4),
("quinta", 4),
("quintas", 4),
("sexta-feira", 5),
("sextas-feiras", 5),
("sexta feira", 5),
("sextas feiras", 5),
("sexta", 5),
("sextas", 5),
("sabado", 6),
("sabados", 6),
("sábado", 6),
("sábados", 6),
("domingo", 0),
("domingos", 0),
("seg", 1),
("seg.", 1),
("2a", 1),
("ter", 2),
("ter.", 2),
("3a", 2),
("qua", 3),
("qua.", 3),
("4a", 3),
("qui", 4),
("qui.", 4),
("5a", 4),
("sex", 5),
("sex.", 5),
("6a", 5),
("sab", 6),
("sab.", 6),
("dom", 0),
("dom.", 0)])
MonthOfYear = dict([("janeiro", 1),
("fevereiro", 2),
("março", 3),
("marco", 3),
("abril", 4),
("maio", 5),
("junho", 6),
("julho", 7),
("agosto", 8),
("septembro", 9),
("setembro", 9),
("outubro", 10),
("novembro", 11),
("dezembro", 12),
("jan", 1),
("fev", 2),
("mar", 3),
("abr", 4),
("mai", 5),
("jun", 6),
("jul", 7),
("ago", 8),
("sept", 9),
("set", 9),
("out", 10),
("nov", 11),
("dez", 12),
("1", 1),
("2", 2),
("3", 3),
("4", 4),
("5", 5),
("6", 6),
("7", 7),
("8", 8),
("9", 9),
("10", 10),
("11", 11),
("12", 12),
("01", 1),
("02", 2),
("03", 3),
("04", 4),
("05", 5),
("06", 6),
("07", 7),
("08", 8),
("09", 9)])
Numbers = dict([("zero", 0),
("um", 1),
("uma", 1),
("dois", 2),
("tres", 3),
("três", 3),
("quatro", 4),
("cinco", 5),
("seis", 6),
("sete", 7),
("oito", 8),
("nove", 9),
("dez", 10),
("onze", 11),
("doze", 12),
("dezena", 12),
("dezenas", 12),
("treze", 13),
("catorze", 14),
("quatorze", 14),
("quinze", 15),
("dezesseis", 16),
("dezasseis", 16),
("dezessete", 17),
("dezassete", 17),
("dezoito", 18),
("dezenove", 19),
("dezanove", 19),
("vinte", 20),
("vinte e um", 21),
("vinte e uma", 21),
("vinte e dois", 22),
("vinte e duas", 22),
("vinte e tres", 23),
("vinte e três", 23),
("vinte e quatro", 24),
("vinte e cinco", 25),
("vinte e seis", 26),
("vinte e sete", 27),
("vinte e oito", 28),
("vinte e nove", 29),
("trinta", 30)])
HolidayNames = dict([("pai", ["diadopai", "diadospais"]),
("mae", ["diadamae", "diadasmaes"]),
("acaodegracas", ["diadegracas", "diadeacaodegracas", "acaodegracas"]),
("trabalho", ["diadotrabalho", "diadotrabalhador", "diadostrabalhadores"]),
("pascoa", ["diadepascoa", "pascoa"]),
("natal", ["natal", "diadenatal"]),
("vesperadenatal", ["vesperadenatal"]),
("anonovo", ["anonovo", "diadeanonovo", "diadoanonovo"]),
("vesperadeanonovo", ["vesperadeanonovo", "vesperadoanonovo"]),
("yuandan", ["yuandan"]),
("todosossantos", ["todosossantos"]),
("professor", ["diadoprofessor", "diadosprofessores"]),
("crianca", ["diadacrianca", "diadascriancas"]),
("mulher", ["diadamulher"])])
VariableHolidaysTimexDictionary = dict([("pai", "-06-WXX-7-3"),
("mae", "-05-WXX-7-2"),
("acaodegracas", "-11-WXX-4-4"),
("memoria", "-03-WXX-2-4")])
DoubleNumbers = dict([("metade", 0.5),
("quarto", 0.25)])
DateTokenPrefix = 'em '
TimeTokenPrefix = 'as '
TokenBeforeDate = 'o '
TokenBeforeTime = 'as '
UpcomingPrefixRegex = f'.^'
NextPrefixRegex = f'(pr[oó]xim[oa]|seguinte|{UpcomingPrefixRegex})\\b'
PastPrefixRegex = f'.^'
PreviousPrefixRegex = f'([uú]ltim[oa]|{PastPrefixRegex})\\b'
ThisPrefixRegex = f'([nd]?es[st][ea])\\b'
RelativeDayRegex = f'^[\\.]'
RestOfDateRegex = f'^[\\.]'
RelativeDurationUnitRegex = f'^[\\.]'
ReferenceDatePeriodRegex = f'^[.]'
FromToRegex = f'\\b(from).+(to)\\b.+'
SingleAmbiguousMonthRegex = f'^(the\\s+)?(may|march)$'
UnspecificDatePeriodRegex = f'^[.]'
PrepositionSuffixRegex = f'\\b(on|in|at|around|from|to)$'
RestOfDateTimeRegex = f'^[\\.]'
SetWeekDayRegex = f'^[\\.]'
NightRegex = f'\\b(meia noite|noite|de noite)\\b'
CommonDatePrefixRegex = f'\\b(dia)\\s+$'
DurationUnitRegex = f'^[\\.]'
DurationConnectorRegex = f'^[.]'
CenturyRegex = f'^[.]'
DecadeRegex = f'^[.]'
DecadeWithCenturyRegex = f'^[.]'
RelativeDecadeRegex = f'^[.]'
YearSuffix = f'((,|\\sde)?\\s*({YearRegex}|{FullTextYearRegex}))'
SuffixAfterRegex = f'^[.]'
YearPeriodRegex = f'^[.]'
FutureSuffixRegex = f'^[.]'
WrittenDecades = dict([("", 0)])
SpecialDecadeCases = dict([("", 0)])
DefaultLanguageFallback = 'DMY'
DurationDateRestrictions = []
AmbiguityFiltersDict = dict([("null", "null")])
EarlyMorningTermList = [r'madrugada']
MorningTermList = [r'manha', r'manhã']
AfternoonTermList = [r'passado o meio dia', r'depois do meio dia']
EveningTermList = [r'tarde']
NightTermList = [r'noite']
SameDayTerms = [r'hoje', r'este dia', r'esse dia', r'o dia']
PlusOneDayTerms = [r'amanha', r'de amanha', r'dia seguinte', r'o dia de amanha', r'proximo dia']
MinusOneDayTerms = [r'ontem', r'ultimo dia']
PlusTwoDayTerms = [r'depois de amanha', r'dia depois de amanha']
MinusTwoDayTerms = [r'anteontem', r'dia antes de ontem']
MonthTerms = [r'mes', r'meses']
MonthToDateTerms = [r'mes ate agora', r'mes ate hoje', r'mes ate a data']
WeekendTerms = [r'fim de semana']
WeekTerms = [r'semana']
YearTerms = [r'ano', r'anos']
YearToDateTerms = [r'ano ate agora', r'ano ate hoje', r'ano ate a data', r'anos ate agora', r'anos ate hoje', r'anos ate a data']
SpecialCharactersEquivalent = dict([("á", "a"),
("é", "e"),
("í", "i"),
("ó", "o"),
("ú", "u"),
("ê", "e"),
("ô", "o"),
("ü", "u"),
("ã", "a"),
("õ", "o"),
("ç", "c")])
# pylint: enable=line-too-long
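# Illustrative sketch (not part of the generated resource file): the patterns
# above use .NET-style named groups '(?<name>...)'. The third-party 'regex'
# module accepts that syntax directly (the stdlib 're' expects '(?P<name>...)'):
#
#     import regex
#     m = regex.search(PortugueseDateTime.MonthRegex, '15 de janeiro', regex.IGNORECASE)
#     assert m.group('month') == 'janeiro'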
--- file: Python-Projects/Random Number Guessing Game/random_no_guessing_game.py | repo: kuwarkapur/Hacktoberfest-2022 | sha: ca0389f8409a1ba780ef4d8f00868c8b02b93926 | license: MIT ---
import random
num = random.randint(1, 100)
user_input = 0
count = 0
while user_input != num:
user_input = int(input("Enter your guess "))
    if user_input < num:
        print("Your guess is too low")
        print("Try Again")
    elif user_input > num:
        print("Your guess is too high")
        print("Try Again")
else:
print("Congratulations you guessed the right number")
count += 1
print("Kudos to you!!")
print("You took", str(count), "trials for guessing the correct number")
--- file: neurotic/examples/internal/paths.py | repo: gigaquads/neurotic | sha: c50eea79f85a5b758aa466d3bb0359557d2eab7c | license: MIT ---
import os
DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
TITANIC_CSV_FILEPATH = os.path.join(DATA_DIR, 'titanic.csv')
JENA_CLIMATE_CSV_FILEPATH = os.path.join(DATA_DIR, 'jena_climate_2009_2016.csv')
--- file: src/tenants/management/commands/update_lastseen.py | repo: litedesk/litedesk-webserver-provision | sha: 1772a508f28a639e2e47a3e3c85f76e1e18b4977 | license: Apache-2.0 ---
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2014, Deutsche Telekom AG - Laboratories (T-Labs)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from optparse import make_option
from apiclient import errors
from apiclient.discovery import build
from dateutil import parser
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from oauth2client.client import SignedJwtAssertionCredentials
import httplib2
import pytz
from provisioning import models
class Command(BaseCommand):
# Check
# https://developers.google.com/admin-sdk/directory/v1/guides/authorizing
# for all available scopes
OAUTH_SCOPE = ['https://www.googleapis.com/auth/admin.directory.user',
'https://www.googleapis.com/auth/admin.directory.device.chromeos']
help = 'Updates the last seen timestamp for provisioned services.'
option_list = BaseCommand.option_list + (
make_option('--skip-okta',
action='store_true',
dest='skip-okta',
default=False,
help='Do not query Okta. Default=False'),
make_option('--skip-google',
action='store_true',
dest='skip-google',
default=False,
help='Do not query Google. Default=False'),
make_option('--skip-airwatch',
action='store_true',
dest='skip-airwatch',
default=False,
help='Do not query AirWatch. Default=False'),
make_option('--tenant',
dest='tenant',
default=1,
help='Tenant id to do this for. Default=1'),
)
def _parseDateTime(self, stamp):
parsed = parser.parse(stamp)
utc = parsed.astimezone(pytz.utc)
stripped = utc.replace(tzinfo=None)
return stripped
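    # Illustrative example of the normalization above:
    # _parseDateTime('2014-05-01T12:00:00+02:00') -> datetime(2014, 5, 1, 10, 0)
    # (the offset is folded into UTC and the tzinfo is stripped).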
def handle(self, *args, **options):
tenant = models.Tenant.objects.get(pk=options['tenant'])
okta_item = models.Okta.objects.get(tenant=tenant)
users = models.User.objects.filter(services=okta_item)
software_contenttype = ContentType.objects.get_for_model(
models.Software)
google_software = models.Software.objects.get(name='Google Account')
device_contenttype = ContentType.objects.get_for_model(
models.Device)
self.stdout.write("Okta users in database.")
user_dict = {}
for user in users:
username = '%s@%s' % (user.username, tenant.email_domain)
user_dict[username] = {'username': user.username, 'user': user}
self.stdout.write(username)
if not options['skip-okta']:
self.stdout.write("")
self.stdout.write("Get Okta user logins.")
okta_users = okta_item.get_users()
okta_item_type = ContentType.objects.get_for_model(okta_item)
for okta_user in okta_users:
okta_username = okta_user['profile']['login']
if okta_username in user_dict:
user_dict[okta_username].update(
{'okta_id': okta_user['id']})
if okta_user['lastLogin']:
models.LastSeenEvent.objects.create(
user=user_dict[okta_username]['user'],
item_type=okta_item_type,
object_id=okta_item.id,
last_seen=self._parseDateTime(okta_user['lastLogin']))
self.stdout.write(
'%s - %s' % (okta_username, okta_user['lastLogin']))
# Get Okta application SSO events
self.stdout.write("")
self.stdout.write("Get Okta SSO events.")
okta_client = okta_item.get_client()
usersoftwares = models.UserProvisionable.objects.filter(
user__tenant=tenant,
item_type=software_contenttype,
service=okta_item).exclude(
object_id=google_software.id)
            # Google account login is handled below, directly via the Google API
for usersoftware in usersoftwares:
oktatenantservice = usersoftware.item.tenantserviceasset_set.get(
service=okta_item)
event = okta_client.last_sso_event(
user_dict[usersoftware.user.tenant_email]['okta_id'],
oktatenantservice.get('application_id'))
if event:
models.LastSeenEvent.objects.create(
user=usersoftware.user,
item_type=software_contenttype,
object_id=usersoftware.object_id,
last_seen=self._parseDateTime(event['published']))
self.stdout.write(
'%s - %s -> %s' % (usersoftware.user.tenant_email,
usersoftware.item.name,
event and event['published'] or "never"))
if not options['skip-google']:
# Get Google lastseen
google_tenant_asset = tenant.tenantasset_set.get(
asset__name='Google Account')
# Run through the OAuth flow and retrieve credentials
certificate_file_path = os.path.join(
settings.CERTIFICATES_DIR, google_tenant_asset.get('CERTIFICATE_FILE_NAME')
)
with open(certificate_file_path) as f:
private_key = f.read()
credentials = SignedJwtAssertionCredentials(
google_tenant_asset.get('CLIENT_EMAIL'),
private_key,
scope=self.OAUTH_SCOPE,
sub=google_tenant_asset.get('ADMINISTRATOR')
)
# Create an httplib2.Http object and authorize it with our
# credentials
http = httplib2.Http()
http = credentials.authorize(http)
directory_service = build('admin', 'directory_v1', http=http)
# Get Google Account lastseen information
all_users = []
page_token = None
params = {'customer': 'my_customer'}
self.stdout.write("")
self.stdout.write("Get Google Account users")
while True:
try:
if page_token:
params['pageToken'] = page_token
current_page = directory_service.users().list(
**params).execute()
all_users.extend(current_page['users'])
page_token = current_page.get('nextPageToken')
if not page_token:
break
except errors.HttpError as error:
self.stderr.write('An error occurred: %s' % error)
break
for user in all_users:
if user['lastLoginTime'] == '1970-01-01T00:00:00.000Z':
continue
if models.UserProvisionable.objects.filter(
user__username=user['primaryEmail'].split('@')[0],
user__tenant=tenant,
item_type=software_contenttype,
object_id=google_software.id).exists():
models.LastSeenEvent.objects.create(
user=user_dict[user['primaryEmail']]['user'],
item_type=software_contenttype,
object_id=google_software.id,
last_seen=self._parseDateTime(user['lastLoginTime']))
self.stdout.write(
user['primaryEmail'] + " - " + user['lastLoginTime'])
# Get Google Device lastseen information
all_devices = []
page_token = None
params = {'customerId': 'my_customer'}
while True:
try:
if page_token:
params['pageToken'] = page_token
current_page = directory_service.chromeosdevices().list(
**params).execute()
all_devices.extend(current_page['chromeosdevices'])
page_token = current_page.get('nextPageToken')
if not page_token:
break
except errors.HttpError as error:
self.stderr.write('An error occurred: %s' % error)
break
self.stdout.write("")
self.stdout.write("Get Google Devices")
chromebook_device = models.Device.objects.get(name='Chromebook')
for device in all_devices:
if models.UserProvisionable.objects.filter(
user__username=device['annotatedUser'].split('@')[0],
user__tenant=tenant,
item_type=device_contenttype,
object_id=chromebook_device.id).exists():
models.LastSeenEvent.objects.create(
user=user_dict[device['annotatedUser']]['user'],
item_type=device_contenttype,
object_id=chromebook_device.id,
last_seen=self._parseDateTime(device['lastSync']))
            self.stdout.write('%s - %s -> %s' % (device['annotatedUser'],
                                                 device['serialNumber'],
                                                 device['lastSync']))
if not options['skip-airwatch']:
self.stdout.write("")
self.stdout.write("Get AirWatch Devices & Platform usage")
airwatch_item = models.AirWatch.objects.get(tenant=tenant)
airwatch_client = airwatch_item.get_client()
endpoint = 'mdm/devices/search'
iPad_device = models.Device.objects.get(name='iPad')
iPhone_device = models.Device.objects.get(name='iPhone')
airwatch_item_type = ContentType.objects.get_for_model(airwatch_item)
airwatch_users = models.User.objects.filter(services=airwatch_item)
for user in airwatch_users:
response = airwatch_client.call_api(
'GET', endpoint, params={'user': user.username})
response.raise_for_status()
if response.status_code == 200:
                devices = response.json().get('Devices')
                if not devices:
                    continue
                newest_seen = parser.parse(devices[0]['LastSeen'])
for device in devices:
seen = parser.parse(device['LastSeen'])
if seen > newest_seen:
newest_seen = seen
if device['Model'].startswith(iPad_device.name):
device_item = iPad_device
elif device['Model'].startswith(iPhone_device.name):
device_item = iPhone_device
                    else:
                        device_item = None
                    # Skip models that map to neither a known iPad nor iPhone.
                    if device_item is not None:
                        models.LastSeenEvent.objects.create(
                            user=user,
                            item_type=device_contenttype,
                            object_id=device_item.id,
                            last_seen=seen)
self.stdout.write(
"%s - %s -> %s" % (user, device['SerialNumber'],
device['LastSeen']))
self.stdout.write("%s -> %s" % (user, newest_seen))
models.LastSeenEvent.objects.create(
user=user,
item_type=airwatch_item_type,
object_id=airwatch_item.id,
last_seen=newest_seen)
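# Invocation sketch (hedged: the management command's name depends on this module's
# filename under management/commands/, which is not shown in this dump):
#
#     python manage.py <lastseen_command> --tenant 1 --skip-airwatch
#
# Each --skip-* flag suppresses one upstream query; --tenant selects the tenant row.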
| 45.315412 | 91 | 0.536423 |
73dae20608e722f58a5213c0d97a8e567bc988b6 | 1,950 | py | Python | jccli/helpers.py | zaro0508/jccli | 1de9a7f493d14bbbe6f3d201eb1aa989cdeec5bb | ["Apache-2.0"] | null | null | null | jccli/helpers.py | zaro0508/jccli | 1de9a7f493d14bbbe6f3d201eb1aa989cdeec5bb | ["Apache-2.0"] | null | null | null | jccli/helpers.py | zaro0508/jccli | 1de9a7f493d14bbbe6f3d201eb1aa989cdeec5bb | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
"""
.. currentmodule:: jccli.helpers.py
.. moduleauthor:: zaro0508 <zaro0508@gmail.com>
This is a set of helper methods
"""
import json
import yaml
def class_to_dict(class_object):
    """
    Copy an iterable of jumpcloud user objects into a plain list
    """
    return list(class_object)
def get_users_from_file(data_file):
    """
    Get users from a data file
    :param data_file:
    :return: a list of SystemUsers (empty if the file has no `users` key)
    """
    users = []
    try:
        with open(data_file, 'r') as file:
            jc_config = yaml.safe_load(file)
            users = jc_config['users']
    except KeyError:
        pass
    return users
def get_groups_from_file(data_file):
    """
    Get groups from a data file
    :param data_file: data file
    :return: a list of jumpcloud groups (empty if the file has no `groups` key)
    """
    groups = []
    try:
        with open(data_file, 'r') as file:
            jc_config = yaml.safe_load(file)
            groups = jc_config['groups']
    except KeyError:
        pass
    return groups
def get_user_from_term(input_string):
    """
    Get a user from an input string
    example:
    jccli create-user \
    --json "{\"email\": \"jc.tester1@sagebase.org\", \"username\": \"jctester1\"}"
    :param input_string: a JSON string; single quotes are normalized to double quotes
    :return: a SystemUser
    """
    user = {}
    if input_string != "":
        user = json.loads(input_string.replace("'", '"'))
    return user
def get_user_from_file(user_file):
    """
    Get a single user from a JSON file
    :param user_file:
    :return: a SystemUser
    """
    with open(user_file, 'r') as file:
        user = json.load(file)
    return user
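# A minimal usage sketch for the helpers above (file names and contents are
# illustrative assumptions, not part of this module):
#
#     from jccli.helpers import get_users_from_file, get_user_from_term
#
#     users = get_users_from_file('users.yaml')   # [] when the `users` key is absent
#     user = get_user_from_term("{'email': 'jc.tester1@sagebase.org', 'username': 'jctester1'}")
#     # single-quoted JSON-ish input is normalized before json.loads()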
| 20.3125 | 83 | 0.58359 |
9878f58b5a9a4a97b43fa2b3d0c711b65d1ce554 | 10,183 | py | Python | src/oci/opsi/models/exadata_member_collection.py | ezequielramos/oci-python-sdk | cc4235cf217beaf9feed75760e9ce82610222762 | ["Apache-2.0", "BSD-3-Clause"] | 249 | 2017-09-11T22:06:05.000Z | 2022-03-04T17:09:29.000Z | src/oci/opsi/models/exadata_member_collection.py | ezequielramos/oci-python-sdk | cc4235cf217beaf9feed75760e9ce82610222762 | ["Apache-2.0", "BSD-3-Clause"] | 228 | 2017-09-11T23:07:26.000Z | 2022-03-23T10:58:50.000Z | src/oci/opsi/models/exadata_member_collection.py | ezequielramos/oci-python-sdk | cc4235cf217beaf9feed75760e9ce82610222762 | ["Apache-2.0", "BSD-3-Clause"] | 224 | 2017-09-27T07:32:43.000Z | 2022-03-25T16:55:42.000Z |
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class ExadataMemberCollection(object):
"""
Partial definition of the exadata insight resource.
"""
#: A constant which can be used with the exadata_type property of a ExadataMemberCollection.
#: This constant has a value of "DBMACHINE"
EXADATA_TYPE_DBMACHINE = "DBMACHINE"
#: A constant which can be used with the exadata_type property of a ExadataMemberCollection.
#: This constant has a value of "EXACS"
EXADATA_TYPE_EXACS = "EXACS"
#: A constant which can be used with the exadata_type property of a ExadataMemberCollection.
#: This constant has a value of "EXACC"
EXADATA_TYPE_EXACC = "EXACC"
#: A constant which can be used with the exadata_rack_type property of a ExadataMemberCollection.
#: This constant has a value of "FULL"
EXADATA_RACK_TYPE_FULL = "FULL"
#: A constant which can be used with the exadata_rack_type property of a ExadataMemberCollection.
#: This constant has a value of "HALF"
EXADATA_RACK_TYPE_HALF = "HALF"
#: A constant which can be used with the exadata_rack_type property of a ExadataMemberCollection.
#: This constant has a value of "QUARTER"
EXADATA_RACK_TYPE_QUARTER = "QUARTER"
#: A constant which can be used with the exadata_rack_type property of a ExadataMemberCollection.
#: This constant has a value of "EIGHTH"
EXADATA_RACK_TYPE_EIGHTH = "EIGHTH"
def __init__(self, **kwargs):
"""
Initializes a new ExadataMemberCollection object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param exadata_insight_id:
The value to assign to the exadata_insight_id property of this ExadataMemberCollection.
:type exadata_insight_id: str
:param exadata_name:
The value to assign to the exadata_name property of this ExadataMemberCollection.
:type exadata_name: str
:param exadata_display_name:
The value to assign to the exadata_display_name property of this ExadataMemberCollection.
:type exadata_display_name: str
:param exadata_type:
The value to assign to the exadata_type property of this ExadataMemberCollection.
Allowed values for this property are: "DBMACHINE", "EXACS", "EXACC", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type exadata_type: str
:param exadata_rack_type:
The value to assign to the exadata_rack_type property of this ExadataMemberCollection.
Allowed values for this property are: "FULL", "HALF", "QUARTER", "EIGHTH", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type exadata_rack_type: str
:param items:
The value to assign to the items property of this ExadataMemberCollection.
:type items: list[oci.opsi.models.ExadataMemberSummary]
"""
self.swagger_types = {
'exadata_insight_id': 'str',
'exadata_name': 'str',
'exadata_display_name': 'str',
'exadata_type': 'str',
'exadata_rack_type': 'str',
'items': 'list[ExadataMemberSummary]'
}
self.attribute_map = {
'exadata_insight_id': 'exadataInsightId',
'exadata_name': 'exadataName',
'exadata_display_name': 'exadataDisplayName',
'exadata_type': 'exadataType',
'exadata_rack_type': 'exadataRackType',
'items': 'items'
}
self._exadata_insight_id = None
self._exadata_name = None
self._exadata_display_name = None
self._exadata_type = None
self._exadata_rack_type = None
self._items = None
@property
def exadata_insight_id(self):
"""
**[Required]** Gets the exadata_insight_id of this ExadataMemberCollection.
The `OCID`__ of the Exadata insight.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:return: The exadata_insight_id of this ExadataMemberCollection.
:rtype: str
"""
return self._exadata_insight_id
@exadata_insight_id.setter
def exadata_insight_id(self, exadata_insight_id):
"""
Sets the exadata_insight_id of this ExadataMemberCollection.
The `OCID`__ of the Exadata insight.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:param exadata_insight_id: The exadata_insight_id of this ExadataMemberCollection.
:type: str
"""
self._exadata_insight_id = exadata_insight_id
@property
def exadata_name(self):
"""
**[Required]** Gets the exadata_name of this ExadataMemberCollection.
        The Exadata system name. If the Exadata system is managed by Enterprise Manager, the name is unique amongst the Exadata systems managed by the same Enterprise Manager.
:return: The exadata_name of this ExadataMemberCollection.
:rtype: str
"""
return self._exadata_name
@exadata_name.setter
def exadata_name(self, exadata_name):
"""
Sets the exadata_name of this ExadataMemberCollection.
        The Exadata system name. If the Exadata system is managed by Enterprise Manager, the name is unique amongst the Exadata systems managed by the same Enterprise Manager.
:param exadata_name: The exadata_name of this ExadataMemberCollection.
:type: str
"""
self._exadata_name = exadata_name
@property
def exadata_display_name(self):
"""
**[Required]** Gets the exadata_display_name of this ExadataMemberCollection.
The user-friendly name for the Exadata system. The name does not have to be unique.
:return: The exadata_display_name of this ExadataMemberCollection.
:rtype: str
"""
return self._exadata_display_name
@exadata_display_name.setter
def exadata_display_name(self, exadata_display_name):
"""
Sets the exadata_display_name of this ExadataMemberCollection.
The user-friendly name for the Exadata system. The name does not have to be unique.
:param exadata_display_name: The exadata_display_name of this ExadataMemberCollection.
:type: str
"""
self._exadata_display_name = exadata_display_name
@property
def exadata_type(self):
"""
**[Required]** Gets the exadata_type of this ExadataMemberCollection.
        Operations Insights internal representation of the Exadata system type.
Allowed values for this property are: "DBMACHINE", "EXACS", "EXACC", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The exadata_type of this ExadataMemberCollection.
:rtype: str
"""
return self._exadata_type
@exadata_type.setter
def exadata_type(self, exadata_type):
"""
Sets the exadata_type of this ExadataMemberCollection.
        Operations Insights internal representation of the Exadata system type.
:param exadata_type: The exadata_type of this ExadataMemberCollection.
:type: str
"""
allowed_values = ["DBMACHINE", "EXACS", "EXACC"]
if not value_allowed_none_or_none_sentinel(exadata_type, allowed_values):
exadata_type = 'UNKNOWN_ENUM_VALUE'
self._exadata_type = exadata_type
@property
def exadata_rack_type(self):
"""
**[Required]** Gets the exadata_rack_type of this ExadataMemberCollection.
Exadata rack type.
Allowed values for this property are: "FULL", "HALF", "QUARTER", "EIGHTH", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The exadata_rack_type of this ExadataMemberCollection.
:rtype: str
"""
return self._exadata_rack_type
@exadata_rack_type.setter
def exadata_rack_type(self, exadata_rack_type):
"""
Sets the exadata_rack_type of this ExadataMemberCollection.
Exadata rack type.
:param exadata_rack_type: The exadata_rack_type of this ExadataMemberCollection.
:type: str
"""
allowed_values = ["FULL", "HALF", "QUARTER", "EIGHTH"]
if not value_allowed_none_or_none_sentinel(exadata_rack_type, allowed_values):
exadata_rack_type = 'UNKNOWN_ENUM_VALUE'
self._exadata_rack_type = exadata_rack_type
@property
def items(self):
"""
**[Required]** Gets the items of this ExadataMemberCollection.
Collection of Exadata members
:return: The items of this ExadataMemberCollection.
:rtype: list[oci.opsi.models.ExadataMemberSummary]
"""
return self._items
@items.setter
def items(self, items):
"""
Sets the items of this ExadataMemberCollection.
Collection of Exadata members
:param items: The items of this ExadataMemberCollection.
:type: list[oci.opsi.models.ExadataMemberSummary]
"""
self._items = items
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
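# A construction sketch (hedged: the OCID and names are illustrative placeholders;
# kwargs are wired up by the @init_model_state_from_kwargs decorator):
#
#     collection = ExadataMemberCollection(
#         exadata_insight_id='ocid1.exadatainsight.oc1..example',
#         exadata_name='exa01',
#         exadata_display_name='Exadata 01',
#         exadata_type='EXACS',
#         exadata_rack_type='HALF',
#         items=[],
#     )
#     collection.exadata_type = 'NOT_A_TYPE'  # coerced to 'UNKNOWN_ENUM_VALUE' by the setter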
| 37.164234 | 245 | 0.68143 |
1dbd983eb7aa8564c1eea73a4cfc4cab42dfb8ba | 668 | py | Python | mooring/migrations/0126_auto_20190410_1625.py | jawaidm/moorings | 22db3fa5917fb13cbee144e64529221ef862cb39 | ["Apache-2.0"] | null | null | null | mooring/migrations/0126_auto_20190410_1625.py | jawaidm/moorings | 22db3fa5917fb13cbee144e64529221ef862cb39 | ["Apache-2.0"] | 2 | 2020-04-30T12:02:15.000Z | 2021-03-19T22:41:46.000Z | mooring/migrations/0126_auto_20190410_1625.py | jawaidm/moorings | 22db3fa5917fb13cbee144e64529221ef862cb39 | ["Apache-2.0"] | 6 | 2020-01-13T08:45:09.000Z | 2021-02-24T03:31:02.000Z |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-04-10 08:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mooring', '0125_auto_20190404_1046'),
]
operations = [
migrations.AlterField(
model_name='mooringarea',
name='additional_info',
field=models.TextField(blank=True, default='', null=True),
),
migrations.AlterField(
model_name='mooringarea',
name='description',
field=models.TextField(blank=True, default='', null=True),
),
]
| 25.692308 | 70 | 0.60479 |
f9cea16808c36fc55f74ad82ccb920618eb2e3e3 | 38 | py | Python | pytoda/__init__.py | PaccMann/paccmann_datasets | 0cb0cee349ffab8e227f09f7df0a8bca6a71f22e | ["MIT"] | 14 | 2019-11-01T12:45:56.000Z | 2022-03-11T15:38:31.000Z | pytoda/__init__.py | PaccMann/paccmann_datasets | 0cb0cee349ffab8e227f09f7df0a8bca6a71f22e | ["MIT"] | 74 | 2019-11-12T19:36:27.000Z | 2022-02-28T08:19:37.000Z | pytoda/__init__.py | PaccMann/paccmann_datasets | 0cb0cee349ffab8e227f09f7df0a8bca6a71f22e | ["MIT"] | 2 | 2021-08-14T11:15:07.000Z | 2021-08-25T06:42:01.000Z |
name = 'pytoda'
__version__ = '0.2.5'
| 12.666667 | 21 | 0.631579 |
e649988f85d9af77e34c1a8deaa752aa59058e7e | 225 | py | Python | src/Dominion/Gametable/Cardstack.py | CarlGathmann/Dominion | dc30a6db5f005d1148c5ddb4346a8d2b4397001c | ["MIT"] | null | null | null | src/Dominion/Gametable/Cardstack.py | CarlGathmann/Dominion | dc30a6db5f005d1148c5ddb4346a8d2b4397001c | ["MIT"] | null | null | null | src/Dominion/Gametable/Cardstack.py | CarlGathmann/Dominion | dc30a6db5f005d1148c5ddb4346a8d2b4397001c | ["MIT"] | null | null | null |
from typing import TypeVar, List
from src.Dominion import Card
T = TypeVar('T', bound=Card)
class Cardstack:
def __init__(self):
        # Start with an empty stack; `None` would make size() raise a TypeError.
        self.cards: List[T] = []
def size(self):
return len(self.cards)
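# A usage sketch (hedged: assumes some `card` instance of a Card subclass exists):
#
#     stack = Cardstack()
#     stack.cards.append(card)
#     assert stack.size() == 1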
| 15 | 34 | 0.648889 |
aac4563eb4e2c22d01351c82284078baaf3a2947 | 3,537 | py | Python | airflow/providers/airbyte/operators/airbyte.py | ChaseKnowlden/airflow | 6b71eac1997a7c0db3b8e3aed6b4e65d01871440 | ["Apache-2.0"] | 15,947 | 2019-01-05T13:51:02.000Z | 2022-03-31T23:33:16.000Z | airflow/providers/airbyte/operators/airbyte.py | ChaseKnowlden/airflow | 6b71eac1997a7c0db3b8e3aed6b4e65d01871440 | ["Apache-2.0"] | 14,603 | 2019-01-05T09:43:19.000Z | 2022-03-31T23:11:59.000Z | airflow/providers/airbyte/operators/airbyte.py | ChaseKnowlden/airflow | 6b71eac1997a7c0db3b8e3aed6b4e65d01871440 | ["Apache-2.0"] | 8,429 | 2019-01-05T19:45:47.000Z | 2022-03-31T22:13:01.000Z |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Optional
from airflow.models import BaseOperator
from airflow.providers.airbyte.hooks.airbyte import AirbyteHook
class AirbyteTriggerSyncOperator(BaseOperator):
"""
    This operator allows you to submit a job to an Airbyte server to run an integration
process between your source and destination.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:AirbyteTriggerSyncOperator`
:param airbyte_conn_id: Required. The name of the Airflow connection to get connection
information for Airbyte.
:type airbyte_conn_id: str
:param connection_id: Required. The Airbyte ConnectionId UUID between a source and destination.
:type connection_id: str
:param asynchronous: Optional. Flag to get job_id after submitting the job to the Airbyte API.
This is useful for submitting long running jobs and
waiting on them asynchronously using the AirbyteJobSensor.
:type asynchronous: bool
:param api_version: Optional. Airbyte API version.
:type api_version: str
:param wait_seconds: Optional. Number of seconds between checks. Only used when ``asynchronous`` is False.
:type wait_seconds: float
:param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
Only used when ``asynchronous`` is False.
:type timeout: float
"""
template_fields = ('connection_id',)
def __init__(
self,
connection_id: str,
airbyte_conn_id: str = "airbyte_default",
asynchronous: Optional[bool] = False,
api_version: Optional[str] = "v1",
wait_seconds: Optional[float] = 3,
timeout: Optional[float] = 3600,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.airbyte_conn_id = airbyte_conn_id
self.connection_id = connection_id
self.timeout = timeout
self.api_version = api_version
self.wait_seconds = wait_seconds
self.asynchronous = asynchronous
    def execute(self, context):
"""Create Airbyte Job and wait to finish"""
hook = AirbyteHook(airbyte_conn_id=self.airbyte_conn_id, api_version=self.api_version)
job_object = hook.submit_sync_connection(connection_id=self.connection_id)
job_id = job_object.json()['job']['id']
self.log.info("Job %s was submitted to Airbyte Server", job_id)
if not self.asynchronous:
self.log.info('Waiting for job %s to complete', job_id)
hook.wait_for_job(job_id=job_id, wait_seconds=self.wait_seconds, timeout=self.timeout)
self.log.info('Job %s completed successfully', job_id)
return job_id
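# A minimal DAG wiring sketch (hedged: dag setup elided; the connection_id UUID is a
# placeholder, and `airbyte_default` must exist as an Airflow connection):
#
#     sync = AirbyteTriggerSyncOperator(
#         task_id='airbyte_sync_example',
#         airbyte_conn_id='airbyte_default',
#         connection_id='15bc3800-82e4-48c3-a32d-620661273f28',
#         asynchronous=False,
#         timeout=3600,
#         wait_seconds=3,
#     )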
| 42.107143 | 110 | 0.711055 |
3fefd0f85758bcef7e1384b8099aac27ddf84ef2 | 7,744 | py | Python | source/operators/translate/start_translate.py | hasanp87/aws-media-insights-engine | 45d093e450a11d4c919d1d6f9a0f0017b2ae5a44 | ["Apache-2.0"] | 1 | 2020-09-11T19:12:21.000Z | 2020-09-11T19:12:21.000Z | source/operators/translate/start_translate.py | hasanp87/aws-media-insights-engine | 45d093e450a11d4c919d1d6f9a0f0017b2ae5a44 | ["Apache-2.0"] | null | null | null | source/operators/translate/start_translate.py | hasanp87/aws-media-insights-engine | 45d093e450a11d4c919d1d6f9a0f0017b2ae5a44 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
import boto3
import os
import json
from botocore import config
import math
import nltk.data
from MediaInsightsEngineLambdaHelper import DataPlane
from MediaInsightsEngineLambdaHelper import MediaInsightsOperationHelper
from MediaInsightsEngineLambdaHelper import MasExecutionError
mie_config = json.loads(os.environ['botoConfig'])
config = config.Config(**mie_config)
translate_client = boto3.client('translate', config=config)
s3 = boto3.client('s3')
def lambda_handler(event, context):
print("We got the following event:\n", event)
operator_object = MediaInsightsOperationHelper(event)
try:
bucket = operator_object.input["Media"]["Text"]["S3Bucket"]
key = operator_object.input["Media"]["Text"]["S3Key"]
except KeyError as e:
operator_object.update_workflow_status("Error")
operator_object.add_workflow_metadata(TranslateError="No valid inputs {e}".format(e=e))
raise MasExecutionError(operator_object.return_output_object())
try:
workflow_id = operator_object.workflow_execution_id
except KeyError as e:
operator_object.update_workflow_status("Error")
operator_object.add_workflow_metadata(TranslateError="Missing a required metadata key {e}".format(e=e))
raise MasExecutionError(operator_object.return_output_object())
try:
asset_id = operator_object.asset_id
except KeyError:
print('No asset id for this workflow')
asset_id = ''
try:
source_lang = operator_object.configuration["SourceLanguageCode"]
target_lang = operator_object.configuration["TargetLanguageCode"]
except KeyError:
operator_object.update_workflow_status("Error")
operator_object.add_workflow_metadata(TranslateError="Language codes are not defined")
raise MasExecutionError(operator_object.return_output_object())
try:
s3_response = s3.get_object(Bucket=bucket, Key=key)
transcribe_metadata = json.loads(s3_response["Body"].read().decode("utf-8"))
transcript = transcribe_metadata["results"]["transcripts"][0]["transcript"]
except Exception as e:
operator_object.update_workflow_status("Error")
operator_object.add_workflow_metadata(TranslateError="Unable to read transcription from S3: {e}".format(e=str(e)))
raise MasExecutionError(operator_object.return_output_object())
# If input text is empty then we're done.
if len(transcript) < 1:
operator_object.update_workflow_status("Complete")
return operator_object.return_output_object()
# Tell the NLTK data loader to look for files in /tmp/
nltk.data.path.append("/tmp/")
# Download NLTK tokenizers to /tmp/
# We use /tmp because that's where AWS Lambda provides write access to the local file system.
nltk.download('punkt', download_dir='/tmp/')
# Create language tokenizer according to user-specified source language.
# Default to English.
if source_lang == 'fr':
print("Using French dictionary to find sentence boundaries.")
tokenizer = nltk.data.load('tokenizers/punkt/french.pickle')
elif source_lang == 'de':
print("Using German dictionary to find sentence boundaries.")
tokenizer = nltk.data.load('tokenizers/punkt/german.pickle')
    elif source_lang == 'ru':
print("Using Russian dictionary to find sentence boundaries.")
tokenizer = nltk.data.load('tokenizers/punkt/russian.pickle')
elif source_lang == 'it':
print("Using Italian dictionary to find sentence boundaries.")
tokenizer = nltk.data.load('tokenizers/punkt/italian.pickle')
elif source_lang == 'pt':
print("Using Portuguese dictionary to find sentence boundaries.")
tokenizer = nltk.data.load('tokenizers/punkt/portuguese.pickle')
elif source_lang == 'es':
print("Using Spanish dictionary to find sentence boundaries.")
tokenizer = nltk.data.load('tokenizers/punkt/spanish.pickle')
else:
print("Using English dictionary to find sentence boundaries.")
tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
# Split input text into a list of sentences
sentences = tokenizer.tokenize(transcript)
print("Input text length: " + str(len(transcript)))
print("Number of sentences: " + str(len(sentences)))
translated_text = ''
transcript_chunk = ''
for sentence in sentences:
# Translate can handle 5000 unicode characters but we'll process no more than 4000
# just to be on the safe side.
if (len(sentence) + len(transcript_chunk) < 4000):
transcript_chunk = transcript_chunk + ' ' + sentence
else:
try:
print("Translation input text length: " + str(len(transcript_chunk)))
                translation_chunk = translate_client.translate_text(Text=transcript_chunk, SourceLanguageCode=source_lang, TargetLanguageCode=target_lang)
                print("Translation output text length: " + str(len(translation_chunk["TranslatedText"])))
except Exception as e:
operator_object.update_workflow_status("Error")
operator_object.add_workflow_metadata(TranslateError="Unable to get response from translate: {e}".format(e=str(e)))
raise MasExecutionError(operator_object.return_output_object())
translated_text = translated_text + ' ' + translation_chunk["TranslatedText"]
transcript_chunk = sentence
print("Translating the final chunk of input text...")
try:
print("Translation input text length: " + str(len(transcript_chunk)))
        translation_chunk = translate_client.translate_text(Text=transcript_chunk, SourceLanguageCode=source_lang, TargetLanguageCode=target_lang)
        print("Translation output text length: " + str(len(translation_chunk["TranslatedText"])))
except Exception as e:
operator_object.update_workflow_status("Error")
operator_object.add_workflow_metadata(TranslateError="Unable to get response from translate: {e}".format(e=str(e)))
raise MasExecutionError(operator_object.return_output_object())
translated_text = translated_text + ' ' + translation_chunk["TranslatedText"]
# Put final result into a JSON object because the MIE dataplane requires it to be so.
translation_result = {}
translation_result["TranslatedText"] = translated_text
translation_result["SourceLanguageCode"] = source_lang
translation_result["TargetLanguageCode"] = target_lang
print("Final translation text length: " + str(len(translated_text)))
dataplane = DataPlane()
metadata_upload = dataplane.store_asset_metadata(asset_id, operator_object.name, workflow_id, translation_result)
if "Status" not in metadata_upload:
operator_object.add_workflow_metadata(
TranslateError="Unable to upload metadata for asset: {asset}".format(asset=asset_id))
operator_object.update_workflow_status("Error")
raise MasExecutionError(operator_object.return_output_object())
else:
if metadata_upload['Status'] == 'Success':
operator_object.add_media_object('Text', metadata_upload['Bucket'], metadata_upload['Key'])
operator_object.update_workflow_status("Complete")
return operator_object.return_output_object()
else:
operator_object.add_workflow_metadata(
TranslateError="Unable to upload metadata for asset: {asset}".format(asset=asset_id))
operator_object.update_workflow_status("Error")
raise MasExecutionError(operator_object.return_output_object())
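# Input contract sketch (hedged: inferred from the reads above; the full MIE event
# envelope is owned by MediaInsightsEngineLambdaHelper and is not reproduced here):
#   - operator_object.input must carry Media.Text.S3Bucket / S3Key pointing at a
#     Transcribe results JSON (results.transcripts[0].transcript is read)
#   - operator_object.configuration must carry SourceLanguageCode and TargetLanguageCode
#   - output is a {'TranslatedText', 'SourceLanguageCode', 'TargetLanguageCode'} dict
#     stored via the MIE dataplane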
| 50.614379 | 152 | 0.715651 |
28f0c9752910e645ae432be328faf121d2c57538 | 611,210 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2016_12_01/aio/operations/_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | ["MIT"] | 1 | 2022-01-24T08:54:57.000Z | 2022-01-24T08:54:57.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2016_12_01/aio/operations/_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | ["MIT"] | null | null | null | sdk/network/azure-mgmt-network/azure/mgmt/network/v2016_12_01/aio/operations/_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | ["MIT"] | null | null | null |
# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._operations import build_application_gateways_backend_health_request_initial, build_application_gateways_create_or_update_request_initial, build_application_gateways_delete_request_initial, build_application_gateways_get_request, build_application_gateways_list_all_request, build_application_gateways_list_request, build_application_gateways_start_request_initial, build_application_gateways_stop_request_initial, build_bgp_service_communities_list_request, build_check_dns_name_availability_request, build_express_route_circuit_authorizations_create_or_update_request_initial, build_express_route_circuit_authorizations_delete_request_initial, build_express_route_circuit_authorizations_get_request, build_express_route_circuit_authorizations_list_request, build_express_route_circuit_peerings_create_or_update_request_initial, build_express_route_circuit_peerings_delete_request_initial, build_express_route_circuit_peerings_get_request, build_express_route_circuit_peerings_list_request, build_express_route_circuits_create_or_update_request_initial, build_express_route_circuits_delete_request_initial, build_express_route_circuits_get_peering_stats_request, build_express_route_circuits_get_request, build_express_route_circuits_get_stats_request, build_express_route_circuits_list_all_request, build_express_route_circuits_list_arp_table_request_initial, build_express_route_circuits_list_request, build_express_route_circuits_list_routes_table_request_initial, build_express_route_circuits_list_routes_table_summary_request_initial, build_express_route_service_providers_list_request, build_load_balancers_create_or_update_request_initial, build_load_balancers_delete_request_initial, build_load_balancers_get_request, build_load_balancers_list_all_request, build_load_balancers_list_request, build_local_network_gateways_create_or_update_request_initial, build_local_network_gateways_delete_request_initial, build_local_network_gateways_get_request, build_local_network_gateways_list_request, build_network_interfaces_create_or_update_request_initial, build_network_interfaces_delete_request_initial, build_network_interfaces_get_effective_route_table_request_initial, build_network_interfaces_get_request, build_network_interfaces_get_virtual_machine_scale_set_network_interface_request, build_network_interfaces_list_all_request, build_network_interfaces_list_effective_network_security_groups_request_initial, build_network_interfaces_list_request, build_network_interfaces_list_virtual_machine_scale_set_network_interfaces_request, build_network_interfaces_list_virtual_machine_scale_set_vm_network_interfaces_request, build_network_security_groups_create_or_update_request_initial, build_network_security_groups_delete_request_initial, build_network_security_groups_get_request, build_network_security_groups_list_all_request, build_network_security_groups_list_request, build_network_watchers_create_or_update_request, build_network_watchers_delete_request_initial, build_network_watchers_get_flow_log_status_request_initial, build_network_watchers_get_next_hop_request_initial, build_network_watchers_get_request, build_network_watchers_get_topology_request, build_network_watchers_get_troubleshooting_request_initial, build_network_watchers_get_troubleshooting_result_request_initial, build_network_watchers_get_vm_security_rules_request_initial, build_network_watchers_list_all_request, build_network_watchers_list_request, build_network_watchers_set_flow_log_configuration_request_initial, \
build_network_watchers_verify_ip_flow_request_initial, build_packet_captures_create_request_initial, build_packet_captures_delete_request_initial, build_packet_captures_get_request, build_packet_captures_get_status_request_initial, build_packet_captures_list_request, build_packet_captures_stop_request_initial, build_public_ip_addresses_create_or_update_request_initial, build_public_ip_addresses_delete_request_initial, build_public_ip_addresses_get_request, build_public_ip_addresses_list_all_request, build_public_ip_addresses_list_request, build_route_filter_rules_create_or_update_request_initial, build_route_filter_rules_delete_request_initial, build_route_filter_rules_get_request, build_route_filter_rules_list_by_route_filter_request, build_route_filter_rules_update_request_initial, build_route_filters_create_or_update_request_initial, build_route_filters_delete_request_initial, build_route_filters_get_request, build_route_filters_list_by_resource_group_request, build_route_filters_list_request, build_route_filters_update_request_initial, build_route_tables_create_or_update_request_initial, build_route_tables_delete_request_initial, build_route_tables_get_request, build_route_tables_list_all_request, build_route_tables_list_request, build_routes_create_or_update_request_initial, build_routes_delete_request_initial, build_routes_get_request, build_routes_list_request, build_security_rules_create_or_update_request_initial, build_security_rules_delete_request_initial, build_security_rules_get_request, build_security_rules_list_request, build_subnets_create_or_update_request_initial, build_subnets_delete_request_initial, build_subnets_get_request, build_subnets_list_request, build_usages_list_request, build_virtual_network_gateway_connections_create_or_update_request_initial, build_virtual_network_gateway_connections_delete_request_initial, build_virtual_network_gateway_connections_get_request, build_virtual_network_gateway_connections_get_shared_key_request, build_virtual_network_gateway_connections_list_request, build_virtual_network_gateway_connections_reset_shared_key_request_initial, build_virtual_network_gateway_connections_set_shared_key_request_initial, build_virtual_network_gateways_create_or_update_request_initial, build_virtual_network_gateways_delete_request_initial, build_virtual_network_gateways_generatevpnclientpackage_request_initial, build_virtual_network_gateways_get_advertised_routes_request_initial, build_virtual_network_gateways_get_bgp_peer_status_request_initial, build_virtual_network_gateways_get_learned_routes_request_initial, build_virtual_network_gateways_get_request, build_virtual_network_gateways_list_request, build_virtual_network_gateways_reset_request_initial, build_virtual_network_peerings_create_or_update_request_initial, build_virtual_network_peerings_delete_request_initial, build_virtual_network_peerings_get_request, build_virtual_network_peerings_list_request, build_virtual_networks_check_ip_address_availability_request, build_virtual_networks_create_or_update_request_initial, build_virtual_networks_delete_request_initial, build_virtual_networks_get_request, build_virtual_networks_list_all_request, build_virtual_networks_list_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkInterfacesOperations:
"""NetworkInterfacesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list_virtual_machine_scale_set_vm_network_interfaces(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets information about all network interfaces in a virtual machine in a virtual machine scale
set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_network_interfaces_list_virtual_machine_scale_set_vm_network_interfaces_request(
resource_group_name=resource_group_name,
virtual_machine_scale_set_name=virtual_machine_scale_set_name,
virtualmachine_index=virtualmachine_index,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_virtual_machine_scale_set_vm_network_interfaces.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_network_interfaces_list_virtual_machine_scale_set_vm_network_interfaces_request(
resource_group_name=resource_group_name,
virtual_machine_scale_set_name=virtual_machine_scale_set_name,
virtualmachine_index=virtualmachine_index,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("NetworkInterfaceListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_vm_network_interfaces.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces"} # type: ignore
@distributed_trace
def list_virtual_machine_scale_set_network_interfaces(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets all network interfaces in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_network_interfaces_list_virtual_machine_scale_set_network_interfaces_request(
resource_group_name=resource_group_name,
virtual_machine_scale_set_name=virtual_machine_scale_set_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_virtual_machine_scale_set_network_interfaces.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_network_interfaces_list_virtual_machine_scale_set_network_interfaces_request(
resource_group_name=resource_group_name,
virtual_machine_scale_set_name=virtual_machine_scale_set_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("NetworkInterfaceListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_network_interfaces.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/networkInterfaces"} # type: ignore
@distributed_trace_async
async def get_virtual_machine_scale_set_network_interface(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.NetworkInterface":
"""Get the specified network interface in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_network_interfaces_get_virtual_machine_scale_set_network_interface_request(
resource_group_name=resource_group_name,
virtual_machine_scale_set_name=virtual_machine_scale_set_name,
virtualmachine_index=virtualmachine_index,
network_interface_name=network_interface_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
expand=expand,
template_url=self.get_virtual_machine_scale_set_network_interface.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_virtual_machine_scale_set_network_interface.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}"} # type: ignore
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_network_interfaces_delete_request_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}"} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
network_interface_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.NetworkInterface":
"""Gets information about the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_network_interfaces_get_request(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
expand=expand,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}"} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
network_interface_name: str,
parameters: "_models.NetworkInterface",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.NetworkInterface":
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'NetworkInterface')
request = build_network_interfaces_create_or_update_request_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
network_interface_name: str,
parameters: "_models.NetworkInterface",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.NetworkInterface"]:
"""Creates or updates a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param parameters: Parameters supplied to the create or update network interface operation.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.NetworkInterface
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkInterface or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.NetworkInterface]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}"} # type: ignore
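# Usage sketch for the long-running create/update above (illustrative only;
# `client`, `nic_params`, and the resource names are assumptions):
#
#     poller = await client.network_interfaces.begin_create_or_update(
#         resource_group_name="my-rg",
#         network_interface_name="my-nic",
#         parameters=nic_params,        # a _models.NetworkInterface instance
#     )
#     nic = await poller.result()       # awaits completion of the LRO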
@distributed_trace
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets all network interfaces in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either NetworkInterfaceListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_network_interfaces_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_all.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_network_interfaces_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("NetworkInterfaceListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkInterfaces"} # type: ignore
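# Usage sketch for the async pager returned above (illustrative only; `client`
# is an assumed authenticated client). Iteration drives prepare_request /
# get_next internally, following `next_link` until the service omits it:
#
#     async for nic in client.network_interfaces.list_all():
#         print(nic.id)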
@distributed_trace
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkInterfaceListResult"]:
"""Gets all network interfaces in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either NetworkInterfaceListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_network_interfaces_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_network_interfaces_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("NetworkInterfaceListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces"} # type: ignore
async def _get_effective_route_table_initial(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> Optional["_models.EffectiveRouteListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EffectiveRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_network_interfaces_get_effective_route_table_request_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._get_effective_route_table_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_effective_route_table_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable"} # type: ignore
@distributed_trace_async
async def begin_get_effective_route_table(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> AsyncLROPoller["_models.EffectiveRouteListResult"]:
"""Gets all route tables applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either EffectiveRouteListResult or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.EffectiveRouteListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.EffectiveRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_effective_route_table_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_effective_route_table.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable"} # type: ignore
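# Usage sketch (illustrative only; the client attribute and resource names are
# assumptions):
#
#     poller = await client.network_interfaces.begin_get_effective_route_table(
#         "my-rg", "my-nic")
#     table = await poller.result()     # an EffectiveRouteListResult
#     for route in table.value or []:
#         print(route.next_hop_type, route.address_prefix)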
async def _list_effective_network_security_groups_initial(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> Optional["_models.EffectiveNetworkSecurityGroupListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EffectiveNetworkSecurityGroupListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_network_interfaces_list_effective_network_security_groups_request_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._list_effective_network_security_groups_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_effective_network_security_groups_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups"} # type: ignore
@distributed_trace_async
async def begin_list_effective_network_security_groups(
self,
resource_group_name: str,
network_interface_name: str,
**kwargs: Any
) -> AsyncLROPoller["_models.EffectiveNetworkSecurityGroupListResult"]:
"""Gets all network security groups applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either
EffectiveNetworkSecurityGroupListResult or the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.EffectiveNetworkSecurityGroupListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.EffectiveNetworkSecurityGroupListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._list_effective_network_security_groups_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_effective_network_security_groups.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups"} # type: ignore
class ApplicationGatewaysOperations:
"""ApplicationGatewaysOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_application_gateways_delete_request_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified application gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}"} # type: ignore
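# Usage sketch; passing `polling=False` selects AsyncNoPolling, so `result()`
# completes after the initial DELETE is accepted instead of waiting for the
# LRO to finish (illustrative only; names are assumptions):
#
#     poller = await client.application_gateways.begin_delete(
#         "my-rg", "my-gateway", polling=False)
#     await poller.result()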
@distributed_trace_async
async def get(
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs: Any
) -> "_models.ApplicationGateway":
"""Gets the specified application gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ApplicationGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.ApplicationGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_application_gateways_get_request(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}"} # type: ignore
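# Usage sketch for the `cls` hook documented above: a callable receiving
# (pipeline_response, deserialized, response_headers) replaces the default
# return value (illustrative only; names are assumptions):
#
#     gw, status = await client.application_gateways.get(
#         "my-rg", "my-gateway",
#         cls=lambda pipeline_response, deserialized, headers: (
#             deserialized, pipeline_response.http_response.status_code))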
async def _create_or_update_initial(
self,
resource_group_name: str,
application_gateway_name: str,
parameters: "_models.ApplicationGateway",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.ApplicationGateway":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'ApplicationGateway')
request = build_application_gateways_create_or_update_request_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
application_gateway_name: str,
parameters: "_models.ApplicationGateway",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.ApplicationGateway"]:
"""Creates or updates the specified application gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:param parameters: Parameters supplied to the create or update application gateway operation.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.ApplicationGateway
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ApplicationGateway or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.ApplicationGateway]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('ApplicationGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}"} # type: ignore
@distributed_trace
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ApplicationGatewayListResult"]:
"""Lists all application gateways in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either ApplicationGatewayListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.ApplicationGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_application_gateways_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_application_gateways_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ApplicationGatewayListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways"} # type: ignore
@distributed_trace
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.ApplicationGatewayListResult"]:
"""Gets all the application gateways in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either ApplicationGatewayListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.ApplicationGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_application_gateways_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_all.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_application_gateways_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ApplicationGatewayListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/applicationGateways"} # type: ignore
async def _start_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_application_gateways_start_request_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._start_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_start_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/start"} # type: ignore
@distributed_trace_async
async def begin_start( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Starts the specified application gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._start_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/start"} # type: ignore
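# Usage sketch; start is an LRO with no response body, so `result()` yields
# None once the operation finishes (illustrative only; names are assumptions):
#
#     poller = await client.application_gateways.begin_start("my-rg", "my-gateway")
#     await poller.result()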
async def _stop_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_application_gateways_stop_request_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._stop_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_stop_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/stop"} # type: ignore
@distributed_trace_async
async def begin_stop( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
application_gateway_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Stops the specified application gateway in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._stop_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_stop.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/stop"} # type: ignore
async def _backend_health_initial(
self,
resource_group_name: str,
application_gateway_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> Optional["_models.ApplicationGatewayBackendHealth"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ApplicationGatewayBackendHealth"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_application_gateways_backend_health_request_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
expand=expand,
template_url=self._backend_health_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ApplicationGatewayBackendHealth', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_backend_health_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/backendhealth"} # type: ignore
@distributed_trace_async
async def begin_backend_health(
self,
resource_group_name: str,
application_gateway_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> AsyncLROPoller["_models.ApplicationGatewayBackendHealth"]:
"""Gets the backend health of the specified application gateway in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param application_gateway_name: The name of the application gateway.
:type application_gateway_name: str
:param expand: Expands BackendAddressPool and BackendHttpSettings referenced in backend health.
Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ApplicationGatewayBackendHealth or
the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.ApplicationGatewayBackendHealth]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ApplicationGatewayBackendHealth"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._backend_health_initial(
resource_group_name=resource_group_name,
application_gateway_name=application_gateway_name,
expand=expand,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('ApplicationGatewayBackendHealth', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_backend_health.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/applicationGateways/{applicationGatewayName}/backendhealth"} # type: ignore
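# Usage sketch; `expand` optionally inlines the referenced BackendAddressPool /
# BackendHttpSettings resources -- the exact expand string the service accepts
# is an assumption here, as are the other names:
#
#     poller = await client.application_gateways.begin_backend_health(
#         "my-rg", "my-gateway", expand="backendhealth")
#     health = await poller.result()
#     for pool_health in health.backend_address_pools or []:
#         print(pool_health.backend_address_pool.id)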
class NetworkManagementClientOperationsMixin:
@distributed_trace_async
async def check_dns_name_availability(
self,
location: str,
domain_name_label: Optional[str] = None,
**kwargs: Any
) -> "_models.DnsNameAvailabilityResult":
"""Checks whether a domain name in the cloudapp.net zone is available for use.
:param location: The location of the domain name.
:type location: str
:param domain_name_label: The domain name to be verified. It must conform to the following
regular expression: ^[a-z][a-z0-9-]{1,61}[a-z0-9]$. Default value is None.
:type domain_name_label: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DnsNameAvailabilityResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.DnsNameAvailabilityResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.DnsNameAvailabilityResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_check_dns_name_availability_request(
location=location,
subscription_id=self._config.subscription_id,
api_version=api_version,
domain_name_label=domain_name_label,
template_url=self.check_dns_name_availability.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DnsNameAvailabilityResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
check_dns_name_availability.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/CheckDnsNameAvailability"} # type: ignore
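# Usage sketch (illustrative only; the location and label values are
# assumptions):
#
#     result = await client.check_dns_name_availability(
#         location="westus", domain_name_label="contoso-app")
#     print(result.available)           # True if the label is free to claim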
class ExpressRouteCircuitAuthorizationsOperations:
"""ExpressRouteCircuitAuthorizationsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
circuit_name: str,
authorization_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_express_route_circuit_authorizations_delete_request_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
authorization_name=authorization_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
circuit_name: str,
authorization_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified authorization from the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param authorization_name: The name of the authorization.
:type authorization_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
authorization_name=authorization_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}"} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
circuit_name: str,
authorization_name: str,
**kwargs: Any
) -> "_models.ExpressRouteCircuitAuthorization":
"""Gets the specified authorization from the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param authorization_name: The name of the authorization.
:type authorization_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuitAuthorization, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitAuthorization
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitAuthorization"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_express_route_circuit_authorizations_get_request(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
authorization_name=authorization_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuitAuthorization', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}"} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
circuit_name: str,
authorization_name: str,
authorization_parameters: "_models.ExpressRouteCircuitAuthorization",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.ExpressRouteCircuitAuthorization":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitAuthorization"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(authorization_parameters, 'ExpressRouteCircuitAuthorization')
request = build_express_route_circuit_authorizations_create_or_update_request_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
authorization_name=authorization_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitAuthorization', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ExpressRouteCircuitAuthorization', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
circuit_name: str,
authorization_name: str,
authorization_parameters: "_models.ExpressRouteCircuitAuthorization",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuitAuthorization"]:
"""Creates or updates an authorization in the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param authorization_name: The name of the authorization.
:type authorization_name: str
:param authorization_parameters: Parameters supplied to the create or update express route
circuit authorization operation.
:type authorization_parameters:
~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitAuthorization
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ExpressRouteCircuitAuthorization or
the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitAuthorization]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitAuthorization"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
authorization_name=authorization_name,
authorization_parameters=authorization_parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('ExpressRouteCircuitAuthorization', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations/{authorizationName}"} # type: ignore
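
# Example (hedged sketch): driving the long-running create with AsyncLROPoller.
# Names are placeholders; an empty ExpressRouteCircuitAuthorization payload is
# assumed to be acceptable since all of its fields are optional.
#
#   from azure.mgmt.network.v2016_12_01.models import ExpressRouteCircuitAuthorization
#
#   async def create_auth(client):
#       poller = await client.express_route_circuit_authorizations.begin_create_or_update(
#           resource_group_name="my-rg",
#           circuit_name="my-circuit",
#           authorization_name="my-auth",
#           authorization_parameters=ExpressRouteCircuitAuthorization(),
#       )
#       # result() waits until the service-side operation reaches a terminal state.
#       return await poller.result()
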
@distributed_trace
def list(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> AsyncIterable["_models.AuthorizationListResult"]:
"""Gets all authorizations in an express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AuthorizationListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.AuthorizationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.AuthorizationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_express_route_circuit_authorizations_list_request(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_express_route_circuit_authorizations_list_request(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("AuthorizationListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/authorizations"} # type: ignore
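
# Example (sketch): AsyncItemPaged is consumed with "async for"; the pager follows
# next_link transparently, so callers never handle AuthorizationListResult pages
# themselves. Resource names are placeholders.
#
#   async def print_authorizations(client):
#       async for auth in client.express_route_circuit_authorizations.list(
#           resource_group_name="my-rg",
#           circuit_name="my-circuit",
#       ):
#           print(auth.name)
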
class ExpressRouteCircuitPeeringsOperations:
"""ExpressRouteCircuitPeeringsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_express_route_circuit_peerings_delete_request_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified peering from the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}"} # type: ignore
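
# Example (sketch): an in-flight delete can be resumed later, even from another
# process, via the continuation token mentioned in the docstring above. Persisting
# the token is the caller's responsibility; "AzurePrivatePeering" is a placeholder.
#
#   async def start_delete(client):
#       poller = await client.express_route_circuit_peerings.begin_delete(
#           resource_group_name="my-rg", circuit_name="my-circuit",
#           peering_name="AzurePrivatePeering",
#       )
#       return poller.continuation_token()  # store this string somewhere durable
#
#   async def resume_delete(client, token):
#       poller = await client.express_route_circuit_peerings.begin_delete(
#           resource_group_name="my-rg", circuit_name="my-circuit",
#           peering_name="AzurePrivatePeering",
#           continuation_token=token,  # skips the initial call, resumes polling
#       )
#       await poller.result()
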
@distributed_trace_async
async def get(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
**kwargs: Any
) -> "_models.ExpressRouteCircuitPeering":
"""Gets the specified authorization from the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuitPeering, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitPeering
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_express_route_circuit_peerings_get_request(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}"} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
peering_parameters: "_models.ExpressRouteCircuitPeering",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.ExpressRouteCircuitPeering":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(peering_parameters, 'ExpressRouteCircuitPeering')
request = build_express_route_circuit_peerings_create_or_update_request_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
peering_parameters: "_models.ExpressRouteCircuitPeering",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuitPeering"]:
"""Creates or updates a peering in the specified express route circuits.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param peering_parameters: Parameters supplied to the create or update express route circuit
peering operation.
:type peering_parameters: ~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitPeering
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ExpressRouteCircuitPeering or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitPeering]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitPeering"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
peering_parameters=peering_parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('ExpressRouteCircuitPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}"} # type: ignore
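
# Example (sketch): the polling keywords described in the docstring. polling=False
# returns after the initial 200/201 without waiting for completion, and
# polling_interval overrides the delay used when the service sends no Retry-After
# header. The peering object is assumed to be a prebuilt ExpressRouteCircuitPeering.
#
#   async def create_peering_no_wait(client, peering):
#       poller = await client.express_route_circuit_peerings.begin_create_or_update(
#           resource_group_name="my-rg", circuit_name="my-circuit",
#           peering_name="AzurePrivatePeering",
#           peering_parameters=peering,
#           polling=False,            # fire-and-forget: AsyncNoPolling is used
#       )
#       return await poller.result()  # the initial response, returned immediately
#
#   # ...or poll every 5 seconds instead of the client's configured default:
#   #   poller = await client.express_route_circuit_peerings.begin_create_or_update(
#   #       ..., polling_interval=5)
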
@distributed_trace
def list(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ExpressRouteCircuitPeeringListResult"]:
"""Gets all peerings in a specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteCircuitPeeringListResult or the result
of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitPeeringListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitPeeringListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_express_route_circuit_peerings_list_request(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_express_route_circuit_peerings_list_request(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ExpressRouteCircuitPeeringListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings"} # type: ignore
class ExpressRouteCircuitsOperations:
"""ExpressRouteCircuitsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_express_route_circuits_delete_request_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}"} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> "_models.ExpressRouteCircuit":
"""Gets information about the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuit, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuit
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuit"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_express_route_circuits_get_request(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}"} # type: ignore
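
# Example (sketch): the "cls" keyword mentioned in every docstring receives the raw
# pipeline response, the deserialized model, and the response headers, letting a
# caller read transport details alongside the model. Names are placeholders.
#
#   def with_status(pipeline_response, deserialized, response_headers):
#       return deserialized, pipeline_response.http_response.status_code
#
#   async def get_circuit_with_status(client):
#       circuit, status = await client.express_route_circuits.get(
#           resource_group_name="my-rg", circuit_name="my-circuit",
#           cls=with_status,
#       )
#       return circuit, status
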
async def _create_or_update_initial(
self,
resource_group_name: str,
circuit_name: str,
parameters: "_models.ExpressRouteCircuit",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.ExpressRouteCircuit":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuit"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'ExpressRouteCircuit')
request = build_express_route_circuits_create_or_update_request_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
circuit_name: str,
parameters: "_models.ExpressRouteCircuit",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuit"]:
"""Creates or updates an express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param parameters: Parameters supplied to the create or update express route circuit operation.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuit
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ExpressRouteCircuit or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuit]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuit"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}"} # type: ignore
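
# Example (hedged sketch): a minimal circuit payload. Field names follow my reading
# of the v2016_12_01 models (sku, service_provider_properties); the provider,
# location, and bandwidth values are placeholders that must match an offering from
# ExpressRouteServiceProvidersOperations.list (see further below).
#
#   from azure.mgmt.network.v2016_12_01.models import (
#       ExpressRouteCircuit, ExpressRouteCircuitSku,
#       ExpressRouteCircuitServiceProviderProperties,
#   )
#
#   async def create_circuit(client):
#       circuit = ExpressRouteCircuit(
#           location="westus",
#           sku=ExpressRouteCircuitSku(
#               name="Standard_MeteredData", tier="Standard", family="MeteredData"),
#           service_provider_properties=ExpressRouteCircuitServiceProviderProperties(
#               service_provider_name="Equinix",
#               peering_location="Silicon Valley",
#               bandwidth_in_mbps=200,
#           ),
#       )
#       poller = await client.express_route_circuits.begin_create_or_update(
#           resource_group_name="my-rg", circuit_name="my-circuit", parameters=circuit,
#       )
#       return await poller.result()
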
async def _list_arp_table_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> Optional["_models.ExpressRouteCircuitsArpTableListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExpressRouteCircuitsArpTableListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_express_route_circuits_list_arp_table_request_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._list_arp_table_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_arp_table_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/arpTables/{devicePath}"} # type: ignore
@distributed_trace_async
async def begin_list_arp_table(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuitsArpTableListResult"]:
"""Gets the currently advertised ARP table associated with the express route circuit in a resource
group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either
ExpressRouteCircuitsArpTableListResult or the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitsArpTableListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitsArpTableListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._list_arp_table_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_arp_table.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/arpTables/{devicePath}"} # type: ignore
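
# Example (sketch): the diagnostic LROs return a result object whose "value" holds
# the table rows. "primary"/"secondary" as device_path values, and the row attribute
# names (ip_address, mac_address), are assumptions about this API version.
#
#   async def dump_arp_table(client):
#       poller = await client.express_route_circuits.begin_list_arp_table(
#           resource_group_name="my-rg", circuit_name="my-circuit",
#           peering_name="AzurePrivatePeering", device_path="primary",
#       )
#       arp = await poller.result()
#       for entry in arp.value or []:
#           print(entry.ip_address, entry.mac_address)
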
async def _list_routes_table_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> Optional["_models.ExpressRouteCircuitsRoutesTableListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExpressRouteCircuitsRoutesTableListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_express_route_circuits_list_routes_table_request_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._list_routes_table_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_routes_table_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTables/{devicePath}"} # type: ignore
@distributed_trace_async
async def begin_list_routes_table(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuitsRoutesTableListResult"]:
"""Gets the currently advertised routes table associated with the express route circuit in a
resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either
ExpressRouteCircuitsRoutesTableListResult or the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitsRoutesTableListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitsRoutesTableListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._list_routes_table_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTables/{devicePath}"} # type: ignore
async def _list_routes_table_summary_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> Optional["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_express_route_circuits_list_routes_table_summary_request_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._list_routes_table_summary_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableSummaryListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_routes_table_summary_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTablesSummary/{devicePath}"} # type: ignore
@distributed_trace_async
async def begin_list_routes_table_summary(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]:
"""Gets the currently advertised routes table summary associated with the express route circuit in
a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either
ExpressRouteCircuitsRoutesTableSummaryListResult or the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitsRoutesTableSummaryListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._list_routes_table_summary_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableSummaryListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table_summary.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTablesSummary/{devicePath}"} # type: ignore
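
# Example (sketch): the routes-table and routes-table-summary operations follow the
# exact pattern of begin_list_arp_table above; only the method and result model
# change. as_dict() is used here to avoid guessing at individual row field names.
#
#   async def dump_routes_summary(client):
#       poller = await client.express_route_circuits.begin_list_routes_table_summary(
#           resource_group_name="my-rg", circuit_name="my-circuit",
#           peering_name="AzurePrivatePeering", device_path="primary",
#       )
#       summary = await poller.result()
#       for row in summary.value or []:
#           print(row.as_dict())
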
@distributed_trace_async
async def get_stats(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> "_models.ExpressRouteCircuitStats":
"""Gets all the stats from an express route circuit in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuitStats, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitStats
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitStats"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_express_route_circuits_get_stats_request(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get_stats.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuitStats', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_stats.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/stats"} # type: ignore
@distributed_trace_async
async def get_peering_stats(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
**kwargs: Any
) -> "_models.ExpressRouteCircuitStats":
"""Gets all stats from an express route circuit in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuitStats, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitStats
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitStats"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_express_route_circuits_get_peering_stats_request(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get_peering_stats.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuitStats', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_peering_stats.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/stats"} # type: ignore
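
# Example (sketch): circuit-level and peering-level stats differ only in scope; both
# return ExpressRouteCircuitStats. as_dict() sidesteps hard-coding field names.
#
#   async def print_stats(client):
#       circuit_stats = await client.express_route_circuits.get_stats(
#           resource_group_name="my-rg", circuit_name="my-circuit",
#       )
#       peering_stats = await client.express_route_circuits.get_peering_stats(
#           resource_group_name="my-rg", circuit_name="my-circuit",
#           peering_name="AzurePrivatePeering",
#       )
#       print(circuit_stats.as_dict())
#       print(peering_stats.as_dict())
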
@distributed_trace
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ExpressRouteCircuitListResult"]:
"""Gets all the express route circuits in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteCircuitListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_express_route_circuits_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_express_route_circuits_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ExpressRouteCircuitListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits"} # type: ignore
@distributed_trace
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.ExpressRouteCircuitListResult"]:
"""Gets all the express route circuits in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteCircuitListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.ExpressRouteCircuitListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_express_route_circuits_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_all.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_express_route_circuits_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ExpressRouteCircuitListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteCircuits"} # type: ignore
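# --- usage sketch (illustrative, not part of the generated client) ---
# A minimal example of consuming the AsyncItemPaged returned by list_all.
# It assumes the versioned aio client from this package and
# DefaultAzureCredential from azure-identity; "<subscription-id>" is a
# placeholder to fill in.
#
#     import asyncio
#     from azure.identity.aio import DefaultAzureCredential
#     from azure.mgmt.network.v2016_12_01.aio import NetworkManagementClient
#
#     async def dump_circuits() -> None:
#         async with DefaultAzureCredential() as credential:
#             async with NetworkManagementClient(credential, "<subscription-id>") as client:
#                 # Paging (the next_link handling above) happens transparently.
#                 async for circuit in client.express_route_circuits.list_all():
#                     print(circuit.name)
#
#     asyncio.run(dump_circuits())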
class ExpressRouteServiceProvidersOperations:
"""ExpressRouteServiceProvidersOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
**kwargs: Any
) -> AsyncIterable["_models.ExpressRouteServiceProviderListResult"]:
"""Gets all the available express route service providers.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either ExpressRouteServiceProviderListResult or the
result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.ExpressRouteServiceProviderListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteServiceProviderListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_express_route_service_providers_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_express_route_service_providers_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ExpressRouteServiceProviderListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteServiceProviders"} # type: ignore
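# Illustrative sketch: besides plain "async for", the AsyncItemPaged returned
# by list can be walked page by page via by_page() (an azure-core feature);
# client construction is assumed to follow the earlier list_all sketch.
#
#     pages = client.express_route_service_providers.list().by_page()
#     async for page in pages:
#         async for provider in page:
#             print(provider.name)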
class LoadBalancersOperations:
"""LoadBalancersOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
load_balancer_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_load_balancers_delete_request_initial(
resource_group_name=resource_group_name,
load_balancer_name=load_balancer_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
load_balancer_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified load balancer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
 this operation not to poll, or pass in your own initialized polling object for a custom
 polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
load_balancer_name=load_balancer_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}"} # type: ignore
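# Illustrative sketch of driving the AsyncLROPoller returned by begin_delete;
# "<resource-group>" and "<lb-name>" are placeholders, and the client is
# assumed to be built as in the earlier sketches.
#
#     poller = await client.load_balancers.begin_delete("<resource-group>", "<lb-name>")
#     await poller.result()   # suspends until the long-running delete finishes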
@distributed_trace_async
async def get(
self,
resource_group_name: str,
load_balancer_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.LoadBalancer":
"""Gets the specified load balancer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param expand: Expands referenced resources. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LoadBalancer, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.LoadBalancer
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LoadBalancer"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_load_balancers_get_request(
resource_group_name=resource_group_name,
load_balancer_name=load_balancer_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
expand=expand,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('LoadBalancer', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}"} # type: ignore
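# Illustrative sketch of a plain GET; the expand argument is forwarded as the
# $expand query parameter (the expression shown is a hypothetical example).
#
#     lb = await client.load_balancers.get(
#         "<resource-group>", "<lb-name>",
#         expand="frontendIPConfigurations/publicIPAddress",
#     )
#     print(lb.provisioning_state)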
async def _create_or_update_initial(
self,
resource_group_name: str,
load_balancer_name: str,
parameters: "_models.LoadBalancer",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.LoadBalancer":
cls = kwargs.pop('cls', None) # type: ClsType["_models.LoadBalancer"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'LoadBalancer')
request = build_load_balancers_create_or_update_request_initial(
resource_group_name=resource_group_name,
load_balancer_name=load_balancer_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('LoadBalancer', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('LoadBalancer', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
load_balancer_name: str,
parameters: "_models.LoadBalancer",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.LoadBalancer"]:
"""Creates or updates a load balancer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param parameters: Parameters supplied to the create or update load balancer operation.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.LoadBalancer
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
 this operation not to poll, or pass in your own initialized polling object for a custom
 polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either LoadBalancer or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.LoadBalancer]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.LoadBalancer"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
load_balancer_name=load_balancer_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('LoadBalancer', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}"} # type: ignore
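# Illustrative sketch of the create-or-update LRO; the LoadBalancer body shown
# is the bare minimum and real deployments would also populate frontend and
# backend configuration.
#
#     from azure.mgmt.network.v2016_12_01.models import LoadBalancer
#
#     poller = await client.load_balancers.begin_create_or_update(
#         "<resource-group>", "<lb-name>", LoadBalancer(location="westus"),
#     )
#     lb = await poller.result()   # the deserialized LoadBalancer on success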
@distributed_trace
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.LoadBalancerListResult"]:
"""Gets all the load balancers in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either LoadBalancerListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.LoadBalancerListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.LoadBalancerListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_load_balancers_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_all.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_load_balancers_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("LoadBalancerListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/loadBalancers"} # type: ignore
@distributed_trace
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.LoadBalancerListResult"]:
"""Gets all the load balancers in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either LoadBalancerListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.LoadBalancerListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.LoadBalancerListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_load_balancers_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_load_balancers_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("LoadBalancerListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers"} # type: ignore
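# Illustrative sketch of the optional cls hook on a paged operation: as
# extract_data above shows, cls receives each page's element list, so it can
# reshape what the iterator yields.
#
#     names = client.load_balancers.list(
#         "<resource-group>",
#         cls=lambda elems: [lb.name for lb in elems],
#     )
#     async for name in names:
#         print(name)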
class NetworkSecurityGroupsOperations:
"""NetworkSecurityGroupsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
network_security_group_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_network_security_groups_delete_request_initial(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
network_security_group_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified network security group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
 this operation not to poll, or pass in your own initialized polling object for a custom
 polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}"} # type: ignore
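# Illustrative sketch of the polling keyword: passing polling=False selects
# AsyncNoPolling (see above), so result() returns once the initial DELETE is
# accepted instead of waiting for the operation to finish.
#
#     poller = await client.network_security_groups.begin_delete(
#         "<resource-group>", "<nsg-name>", polling=False,
#     )
#     await poller.result()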
@distributed_trace_async
async def get(
self,
resource_group_name: str,
network_security_group_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.NetworkSecurityGroup":
"""Gets the specified network security group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:param expand: Expands referenced resources. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkSecurityGroup, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.NetworkSecurityGroup
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkSecurityGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_network_security_groups_get_request(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
expand=expand,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}"} # type: ignore
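# Illustrative sketch of error handling: the error_map above turns a 404 into
# ResourceNotFoundError, so a missing group can be handled without inspecting
# status codes.
#
#     from azure.core.exceptions import ResourceNotFoundError
#
#     try:
#         nsg = await client.network_security_groups.get("<resource-group>", "<nsg-name>")
#     except ResourceNotFoundError:
#         nsg = None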
async def _create_or_update_initial(
self,
resource_group_name: str,
network_security_group_name: str,
parameters: "_models.NetworkSecurityGroup",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.NetworkSecurityGroup":
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkSecurityGroup"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'NetworkSecurityGroup')
request = build_network_security_groups_create_or_update_request_initial(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
network_security_group_name: str,
parameters: "_models.NetworkSecurityGroup",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.NetworkSecurityGroup"]:
"""Creates or updates a network security group in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:param parameters: Parameters supplied to the create or update network security group
operation.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.NetworkSecurityGroup
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
 this operation not to poll, or pass in your own initialized polling object for a custom
 polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NetworkSecurityGroup or the result
of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.NetworkSecurityGroup]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkSecurityGroup"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}"} # type: ignore
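# Illustrative sketch of suspending and resuming an LRO with
# continuation_token; the positional arguments are still required by the
# signature on resume, but the saved token drives the polling.
#
#     from azure.mgmt.network.v2016_12_01.models import NetworkSecurityGroup
#
#     params = NetworkSecurityGroup(location="westus")
#     poller = await client.network_security_groups.begin_create_or_update(
#         "<resource-group>", "<nsg-name>", params,
#     )
#     token = poller.continuation_token()
#     # ...later, e.g. after a process restart:
#     resumed = await client.network_security_groups.begin_create_or_update(
#         "<resource-group>", "<nsg-name>", params, continuation_token=token,
#     )
#     nsg = await resumed.result()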
@distributed_trace
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.NetworkSecurityGroupListResult"]:
"""Gets all network security groups in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either NetworkSecurityGroupListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.NetworkSecurityGroupListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkSecurityGroupListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_network_security_groups_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_all.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_network_security_groups_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("NetworkSecurityGroupListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkSecurityGroups"} # type: ignore
@distributed_trace
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkSecurityGroupListResult"]:
"""Gets all network security groups in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either NetworkSecurityGroupListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.NetworkSecurityGroupListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkSecurityGroupListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_network_security_groups_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_network_security_groups_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("NetworkSecurityGroupListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups"} # type: ignore
class SecurityRulesOperations:
"""SecurityRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
network_security_group_name: str,
security_rule_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_security_rules_delete_request_initial(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
security_rule_name=security_rule_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules/{securityRuleName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
network_security_group_name: str,
security_rule_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified network security rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:param security_rule_name: The name of the security rule.
:type security_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
 this operation not to poll, or pass in your own initialized polling object for a custom
 polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
security_rule_name=security_rule_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules/{securityRuleName}"} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
network_security_group_name: str,
security_rule_name: str,
**kwargs: Any
) -> "_models.SecurityRule":
"""Get the specified network security rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:param security_rule_name: The name of the security rule.
:type security_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecurityRule, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.SecurityRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_security_rules_get_request(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
security_rule_name=security_rule_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('SecurityRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules/{securityRuleName}"} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
network_security_group_name: str,
security_rule_name: str,
security_rule_parameters: "_models.SecurityRule",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.SecurityRule":
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(security_rule_parameters, 'SecurityRule')
request = build_security_rules_create_or_update_request_initial(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
security_rule_name=security_rule_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('SecurityRule', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('SecurityRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules/{securityRuleName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
network_security_group_name: str,
security_rule_name: str,
security_rule_parameters: "_models.SecurityRule",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.SecurityRule"]:
"""Creates or updates a security rule in the specified network security group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:param security_rule_name: The name of the security rule.
:type security_rule_name: str
:param security_rule_parameters: Parameters supplied to the create or update network security
rule operation.
:type security_rule_parameters: ~azure.mgmt.network.v2016_12_01.models.SecurityRule
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
 this operation not to poll, or pass in your own initialized polling object for a custom
 polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either SecurityRule or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.SecurityRule]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityRule"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
security_rule_name=security_rule_name,
security_rule_parameters=security_rule_parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('SecurityRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules/{securityRuleName}"} # type: ignore
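# Illustrative sketch of creating a nested resource; the SecurityRule field
# values below are example settings (an inbound allow rule for port 22), not
# recommendations.
#
#     from azure.mgmt.network.v2016_12_01.models import SecurityRule
#
#     rule = SecurityRule(
#         protocol="Tcp", access="Allow", direction="Inbound", priority=100,
#         source_address_prefix="*", source_port_range="*",
#         destination_address_prefix="*", destination_port_range="22",
#     )
#     poller = await client.security_rules.begin_create_or_update(
#         "<resource-group>", "<nsg-name>", "<rule-name>", rule,
#     )
#     created = await poller.result()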
@distributed_trace
def list(
self,
resource_group_name: str,
network_security_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.SecurityRuleListResult"]:
"""Gets all security rules in a network security group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator-like instance of either SecurityRuleListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.SecurityRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_security_rules_list_request(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_security_rules_list_request(
resource_group_name=resource_group_name,
network_security_group_name=network_security_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("SecurityRuleListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/securityRules"} # type: ignore
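# Illustrative sketch: listing a nested collection takes both the resource
# group and the parent group name; each yielded item is a deserialized
# SecurityRule model.
#
#     async for rule in client.security_rules.list("<resource-group>", "<nsg-name>"):
#         print(rule.name, rule.priority)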
class NetworkWatchersOperations: # pylint: disable=too-many-public-methods
"""NetworkWatchersOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace_async
async def create_or_update(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.NetworkWatcher",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.NetworkWatcher":
"""Creates or updates a network watcher in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the network watcher resource.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.NetworkWatcher
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkWatcher, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.NetworkWatcher
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcher"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'NetworkWatcher')
request = build_network_watchers_create_or_update_request(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self.create_or_update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}"} # type: ignore
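# Illustrative sketch: unlike the begin_* operations above, this
# create_or_update is not long-running, so it returns the deserialized
# NetworkWatcher directly rather than a poller.
#
#     from azure.mgmt.network.v2016_12_01.models import NetworkWatcher
#
#     watcher = await client.network_watchers.create_or_update(
#         "<resource-group>", "<watcher-name>", NetworkWatcher(location="westus"),
#     )
#     print(watcher.provisioning_state)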
@distributed_trace_async
async def get(
self,
resource_group_name: str,
network_watcher_name: str,
**kwargs: Any
) -> "_models.NetworkWatcher":
"""Gets the specified network watcher by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkWatcher, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.NetworkWatcher
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcher"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_network_watchers_get_request(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkWatcher', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}"} # type: ignore
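# Sketch of ``get``, including the optional ``cls`` hook documented above,
# which is handed the raw pipeline response, the deserialized model, and the
# response headers. Assumes the ``client`` placeholder from the
# ``create_or_update`` sketch.
#
#     watcher = await client.network_watchers.get("my-rg", "my-watcher")
#
#     # Capture the HTTP status code alongside the model via ``cls``:
#     watcher, status = await client.network_watchers.get(
#         "my-rg", "my-watcher",
#         cls=lambda pipeline_response, deserialized, headers:
#             (deserialized, pipeline_response.http_response.status_code))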
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
network_watcher_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_network_watchers_delete_request_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
network_watcher_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified network watcher resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False to
skip polling for this operation, or pass in your own initialized polling object for a custom
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
api_version=api_version,
cls=lambda x, y, z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}"} # type: ignore
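# Sketch of the long-running delete. ``begin_delete`` returns an
# AsyncLROPoller at once; awaiting ``result()`` waits for completion. The
# save/restore below is the pattern the ``continuation_token`` keyword
# supports; names are placeholders as before.
#
#     poller = await client.network_watchers.begin_delete("my-rg", "my-watcher")
#     token = poller.continuation_token()  # persist to resume elsewhere
#     await poller.result()                # None on success
#
#     # Resuming from a saved token, e.g. in another process:
#     resumed = await client.network_watchers.begin_delete(
#         "my-rg", "my-watcher", continuation_token=token)
#     await resumed.result()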
@distributed_trace
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.NetworkWatcherListResult"]:
"""Gets all network watchers by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkWatcherListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.NetworkWatcherListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcherListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_network_watchers_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_network_watchers_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("NetworkWatcherListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers"} # type: ignore
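# ``list`` returns an AsyncItemPaged; nothing is sent until iteration starts,
# and any paging is handled transparently. Sketch:
#
#     async for watcher in client.network_watchers.list("my-rg"):
#         print(watcher.name, watcher.location)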
@distributed_trace
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.NetworkWatcherListResult"]:
"""Gets all network watchers by subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkWatcherListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.NetworkWatcherListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkWatcherListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_network_watchers_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_all.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_network_watchers_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("NetworkWatcherListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkWatchers"} # type: ignore
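# ``list_all`` is the subscription-wide variant of ``list``. One way to
# materialize it eagerly (sketch):
#
#     watchers = [w async for w in client.network_watchers.list_all()]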
@distributed_trace_async
async def get_topology(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.TopologyParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.Topology":
"""Gets the current network topology by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the representation of topology.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.TopologyParameters
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Topology, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.Topology
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Topology"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'TopologyParameters')
request = build_network_watchers_get_topology_request(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self.get_topology.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Topology', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_topology.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/topology"} # type: ignore
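# Sketch of ``get_topology``: the body names the resource group whose
# topology is wanted, which need not be the watcher's own group. Model and
# attribute names follow the v2016_12_01 models; verify before relying on
# them.
#
#     from azure.mgmt.network.v2016_12_01.models import TopologyParameters
#
#     topology = await client.network_watchers.get_topology(
#         "my-rg", "my-watcher",
#         TopologyParameters(target_resource_group_name="target-rg"))
#     for resource in topology.resources or []:
#         print(resource.name, resource.location)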
async def _verify_ip_flow_initial(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.VerificationIPFlowParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.VerificationIPFlowResult":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VerificationIPFlowResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'VerificationIPFlowParameters')
request = build_network_watchers_verify_ip_flow_request_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._verify_ip_flow_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VerificationIPFlowResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('VerificationIPFlowResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_verify_ip_flow_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/ipFlowVerify"} # type: ignore
@distributed_trace_async
async def begin_verify_ip_flow(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.VerificationIPFlowParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.VerificationIPFlowResult"]:
"""Verify IP flow from the specified VM to a location given the currently configured NSG rules.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the IP flow to be verified.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.VerificationIPFlowParameters
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False to
skip polling for this operation, or pass in your own initialized polling object for a custom
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VerificationIPFlowResult or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.VerificationIPFlowResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VerificationIPFlowResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._verify_ip_flow_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x, y, z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('VerificationIPFlowResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_verify_ip_flow.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/ipFlowVerify"} # type: ignore
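# Sketch of an IP-flow verification round trip. Field names are taken from
# the v2016_12_01 VerificationIPFlowParameters model; the resource ID is a
# placeholder.
#
#     from azure.mgmt.network.v2016_12_01.models import VerificationIPFlowParameters
#
#     params = VerificationIPFlowParameters(
#         target_resource_id="/subscriptions/.../virtualMachines/my-vm",
#         direction="Outbound",
#         protocol="TCP",
#         local_port="80",
#         remote_port="443",
#         local_ip_address="10.0.0.4",
#         remote_ip_address="203.0.113.5",
#     )
#     poller = await client.network_watchers.begin_verify_ip_flow(
#         "my-rg", "my-watcher", params)
#     result = await poller.result()
#     print(result.access, result.rule_name)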
async def _get_next_hop_initial(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.NextHopParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.NextHopResult":
cls = kwargs.pop('cls', None) # type: ClsType["_models.NextHopResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'NextHopParameters')
request = build_network_watchers_get_next_hop_request_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._get_next_hop_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NextHopResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('NextHopResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_next_hop_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/nextHop"} # type: ignore
@distributed_trace_async
async def begin_get_next_hop(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.NextHopParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.NextHopResult"]:
"""Gets the next hop from the specified VM.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the source and destination endpoint.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.NextHopParameters
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False to
skip polling for this operation, or pass in your own initialized polling object for a custom
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either NextHopResult or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.NextHopResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NextHopResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_next_hop_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x, y, z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('NextHopResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_next_hop.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/nextHop"} # type: ignore
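# ``begin_get_next_hop`` follows the same LRO shape; the result reports the
# next-hop type and IP address for the given source/destination pair.
# Sketch, with a placeholder resource ID:
#
#     from azure.mgmt.network.v2016_12_01.models import NextHopParameters
#
#     poller = await client.network_watchers.begin_get_next_hop(
#         "my-rg", "my-watcher",
#         NextHopParameters(
#             target_resource_id="/subscriptions/.../virtualMachines/my-vm",
#             source_ip_address="10.0.0.4",
#             destination_ip_address="10.0.1.5",
#         ),
#     )
#     hop = await poller.result()
#     print(hop.next_hop_type, hop.next_hop_ip_address)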
async def _get_vm_security_rules_initial(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.SecurityGroupViewParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.SecurityGroupViewResult":
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityGroupViewResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'SecurityGroupViewParameters')
request = build_network_watchers_get_vm_security_rules_request_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._get_vm_security_rules_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('SecurityGroupViewResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('SecurityGroupViewResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_vm_security_rules_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/securityGroupView"} # type: ignore
@distributed_trace_async
async def begin_get_vm_security_rules(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.SecurityGroupViewParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.SecurityGroupViewResult"]:
"""Gets the configured and effective security group rules on the specified VM.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param parameters: Parameters that define the VM to check security groups for.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.SecurityGroupViewParameters
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False to
skip polling for this operation, or pass in your own initialized polling object for a custom
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either SecurityGroupViewResult or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.SecurityGroupViewResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityGroupViewResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_vm_security_rules_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x, y, z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('SecurityGroupViewResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_vm_security_rules.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/securityGroupView"} # type: ignore
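# Sketch for the security-group view; the only required field is the VM's
# resource ID (a placeholder below).
#
#     from azure.mgmt.network.v2016_12_01.models import SecurityGroupViewParameters
#
#     poller = await client.network_watchers.begin_get_vm_security_rules(
#         "my-rg", "my-watcher",
#         SecurityGroupViewParameters(
#             target_resource_id="/subscriptions/.../virtualMachines/my-vm"),
#     )
#     view = await poller.result()
#     for nic in view.network_interfaces or []:
#         print(nic.id)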
async def _get_troubleshooting_initial(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.TroubleshootingParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.TroubleshootingResult":
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'TroubleshootingParameters')
request = build_network_watchers_get_troubleshooting_request_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._get_troubleshooting_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_troubleshooting_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/troubleshoot"} # type: ignore
@distributed_trace_async
async def begin_get_troubleshooting(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.TroubleshootingParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.TroubleshootingResult"]:
"""Initiate troubleshooting on a specified resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define the resource to troubleshoot.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.TroubleshootingParameters
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False to
skip polling for this operation, or pass in your own initialized polling object for a custom
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either TroubleshootingResult or the result
of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.TroubleshootingResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_troubleshooting_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x, y, z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_troubleshooting.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/troubleshoot"} # type: ignore
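# Troubleshooting needs a storage account and blob path for its report, in
# addition to the target resource; all three IDs below are placeholders.
#
#     from azure.mgmt.network.v2016_12_01.models import TroubleshootingParameters
#
#     poller = await client.network_watchers.begin_get_troubleshooting(
#         "my-rg", "my-watcher",
#         TroubleshootingParameters(
#             target_resource_id="/subscriptions/.../connections/my-vpn-conn",
#             storage_id="/subscriptions/.../storageAccounts/mystorage",
#             storage_path="https://mystorage.blob.core.windows.net/troubleshoot",
#         ),
#     )
#     result = await poller.result()
#     print(result.code)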
async def _get_troubleshooting_result_initial(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.QueryTroubleshootingParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.TroubleshootingResult":
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'QueryTroubleshootingParameters')
request = build_network_watchers_get_troubleshooting_result_request_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._get_troubleshooting_result_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_troubleshooting_result_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryTroubleshootResult"} # type: ignore
@distributed_trace_async
async def begin_get_troubleshooting_result(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.QueryTroubleshootingParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.TroubleshootingResult"]:
"""Get the last completed troubleshooting result on a specified resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define the resource to query the troubleshooting result.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.QueryTroubleshootingParameters
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False to
skip polling for this operation, or pass in your own initialized polling object for a custom
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either TroubleshootingResult or the result
of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.TroubleshootingResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.TroubleshootingResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_troubleshooting_result_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x, y, z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('TroubleshootingResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_troubleshooting_result.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryTroubleshootResult"} # type: ignore
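# Retrieving the last completed run only needs the original target's ID.
# Sketch; the ``summary`` attribute name follows the v2016_12_01
# TroubleshootingDetails model and should be verified against it.
#
#     from azure.mgmt.network.v2016_12_01.models import QueryTroubleshootingParameters
#
#     poller = await client.network_watchers.begin_get_troubleshooting_result(
#         "my-rg", "my-watcher",
#         QueryTroubleshootingParameters(
#             target_resource_id="/subscriptions/.../connections/my-vpn-conn"),
#     )
#     last = await poller.result()
#     for detail in last.results or []:
#         print(detail.id, detail.summary)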
async def _set_flow_log_configuration_initial(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.FlowLogInformation",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.FlowLogInformation":
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'FlowLogInformation')
request = build_network_watchers_set_flow_log_configuration_request_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._set_flow_log_configuration_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_set_flow_log_configuration_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/configureFlowLog"} # type: ignore
@distributed_trace_async
async def begin_set_flow_log_configuration(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.FlowLogInformation",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.FlowLogInformation"]:
"""Configures flow log on a specified resource.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define the configuration of flow log.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.FlowLogInformation
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False to
skip polling for this operation, or pass in your own initialized polling object for a custom
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either FlowLogInformation or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.FlowLogInformation]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._set_flow_log_configuration_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x, y, z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_set_flow_log_configuration.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/configureFlowLog"} # type: ignore
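# Enabling NSG flow logs: the body carries the NSG's resource ID, the
# storage account that receives the logs, and the enabled flag. Sketch with
# placeholder IDs:
#
#     from azure.mgmt.network.v2016_12_01.models import FlowLogInformation
#
#     poller = await client.network_watchers.begin_set_flow_log_configuration(
#         "my-rg", "my-watcher",
#         FlowLogInformation(
#             target_resource_id="/subscriptions/.../networkSecurityGroups/my-nsg",
#             storage_id="/subscriptions/.../storageAccounts/mystorage",
#             enabled=True,
#         ),
#     )
#     info = await poller.result()
#     print(info.enabled)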
async def _get_flow_log_status_initial(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.FlowLogStatusParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.FlowLogInformation":
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'FlowLogStatusParameters')
request = build_network_watchers_get_flow_log_status_request_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._get_flow_log_status_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_flow_log_status_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryFlowLogStatus"} # type: ignore
@distributed_trace_async
async def begin_get_flow_log_status(
self,
resource_group_name: str,
network_watcher_name: str,
parameters: "_models.FlowLogStatusParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.FlowLogInformation"]:
"""Queries status of flow log on a specified resource.
:param resource_group_name: The name of the network watcher resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher resource.
:type network_watcher_name: str
:param parameters: Parameters that define a resource to query flow log status.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.FlowLogStatusParameters
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False to
skip polling for this operation, or pass in your own initialized polling object for a custom
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either FlowLogInformation or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.FlowLogInformation]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.FlowLogInformation"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_flow_log_status_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x, y, z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('FlowLogInformation', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_flow_log_status.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/queryFlowLogStatus"} # type: ignore
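# Querying flow-log status reuses FlowLogInformation as the result type, so
# the same ``enabled``/``storage_id`` fields come back. Sketch:
#
#     from azure.mgmt.network.v2016_12_01.models import FlowLogStatusParameters
#
#     poller = await client.network_watchers.begin_get_flow_log_status(
#         "my-rg", "my-watcher",
#         FlowLogStatusParameters(
#             target_resource_id="/subscriptions/.../networkSecurityGroups/my-nsg"),
#     )
#     status = await poller.result()
#     print(status.enabled, status.storage_id)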
class PacketCapturesOperations:
"""PacketCapturesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _create_initial(
self,
resource_group_name: str,
network_watcher_name: str,
packet_capture_name: str,
parameters: "_models.PacketCapture",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.PacketCaptureResult":
cls = kwargs.pop('cls', None) # type: ClsType["_models.PacketCaptureResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'PacketCapture')
request = build_packet_captures_create_request_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
packet_capture_name=packet_capture_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PacketCaptureResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}"} # type: ignore
@distributed_trace_async
async def begin_create(
self,
resource_group_name: str,
network_watcher_name: str,
packet_capture_name: str,
parameters: "_models.PacketCapture",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.PacketCaptureResult"]:
"""Create and start a packet capture on the specified VM.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param packet_capture_name: The name of the packet capture session.
:type packet_capture_name: str
:param parameters: Parameters that define the create packet capture operation.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.PacketCapture
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False to
skip polling for this operation, or pass in your own initialized polling object for a custom
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PacketCaptureResult or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.PacketCaptureResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.PacketCaptureResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
packet_capture_name=packet_capture_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x, y, z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('PacketCaptureResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}"} # type: ignore
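# Starting a capture: the body names the target VM and where the .cap file
# lands. ``PacketCaptureStorageLocation`` accepts a storage account, a file
# path on the VM, or both; the values below are placeholders.
#
#     from azure.mgmt.network.v2016_12_01.models import (
#         PacketCapture, PacketCaptureStorageLocation)
#
#     poller = await client.packet_captures.begin_create(
#         "my-rg", "my-watcher", "my-capture",
#         PacketCapture(
#             target="/subscriptions/.../virtualMachines/my-vm",
#             storage_location=PacketCaptureStorageLocation(
#                 file_path="D:\\captures\\my-capture.cap"),
#         ),
#     )
#     capture = await poller.result()
#     print(capture.name, capture.provisioning_state)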
@distributed_trace_async
async def get(
self,
resource_group_name: str,
network_watcher_name: str,
packet_capture_name: str,
**kwargs: Any
) -> "_models.PacketCaptureResult":
"""Gets a packet capture session by name.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param packet_capture_name: The name of the packet capture session.
:type packet_capture_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PacketCaptureResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.PacketCaptureResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PacketCaptureResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_packet_captures_get_request(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
packet_capture_name=packet_capture_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PacketCaptureResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}"} # type: ignore
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
network_watcher_name: str,
packet_capture_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_packet_captures_delete_request_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
packet_capture_name=packet_capture_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
network_watcher_name: str,
packet_capture_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified packet capture session.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param packet_capture_name: The name of the packet capture session.
:type packet_capture_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
packet_capture_name=packet_capture_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}"} # type: ignore
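# Hedged usage sketch (editorial): this LRO carries no body, so the poller is
# awaited only for completion; `wait()` raises HttpResponseError on failure:
#
#     poller = await client.packet_captures.begin_delete(
#         resource_group_name="example-rg",
#         network_watcher_name="example-watcher",
#         packet_capture_name="example-capture",
#     )
#     await poller.wait()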
async def _stop_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
network_watcher_name: str,
packet_capture_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_packet_captures_stop_request_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
packet_capture_name=packet_capture_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._stop_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_stop_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}/stop"} # type: ignore
@distributed_trace_async
async def begin_stop( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
network_watcher_name: str,
packet_capture_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Stops a specified packet capture session.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the network watcher.
:type network_watcher_name: str
:param packet_capture_name: The name of the packet capture session.
:type packet_capture_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._stop_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
packet_capture_name=packet_capture_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_stop.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}/stop"} # type: ignore
async def _get_status_initial(
self,
resource_group_name: str,
network_watcher_name: str,
packet_capture_name: str,
**kwargs: Any
) -> "_models.PacketCaptureQueryStatusResult":
cls = kwargs.pop('cls', None) # type: ClsType["_models.PacketCaptureQueryStatusResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_packet_captures_get_status_request_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
packet_capture_name=packet_capture_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._get_status_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('PacketCaptureQueryStatusResult', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('PacketCaptureQueryStatusResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_status_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}/queryStatus"} # type: ignore
@distributed_trace_async
async def begin_get_status(
self,
resource_group_name: str,
network_watcher_name: str,
packet_capture_name: str,
**kwargs: Any
) -> AsyncLROPoller["_models.PacketCaptureQueryStatusResult"]:
"""Query the status of a running packet capture session.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:param packet_capture_name: The name given to the packet capture session.
:type packet_capture_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PacketCaptureQueryStatusResult or
the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.PacketCaptureQueryStatusResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.PacketCaptureQueryStatusResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_status_initial(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
packet_capture_name=packet_capture_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('PacketCaptureQueryStatusResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_status.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures/{packetCaptureName}/queryStatus"} # type: ignore
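# Hedged usage sketch (editorial): unlike the other pollers in this class, the
# one above is built with lro_options={'final-state-via': 'location'}, so the
# final PacketCaptureQueryStatusResult is read from the Location URL:
#
#     poller = await client.packet_captures.begin_get_status(
#         resource_group_name="example-rg",
#         network_watcher_name="example-watcher",
#         packet_capture_name="example-capture",
#     )
#     status = await poller.result()
#     print(status.packet_capture_status)  # attribute per the 2016-12-01 models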
@distributed_trace
def list(
self,
resource_group_name: str,
network_watcher_name: str,
**kwargs: Any
) -> AsyncIterable["_models.PacketCaptureListResult"]:
"""Lists all packet capture sessions within the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_watcher_name: The name of the Network Watcher resource.
:type network_watcher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PacketCaptureListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.PacketCaptureListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.PacketCaptureListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_packet_captures_list_request(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_packet_captures_list_request(
resource_group_name=resource_group_name,
network_watcher_name=network_watcher_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("PacketCaptureListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkWatchers/{networkWatcherName}/packetCaptures"} # type: ignore
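# Hedged usage sketch (editorial): `list` returns an AsyncItemPaged, so the
# call itself is not awaited; items are consumed with `async for`:
#
#     async for capture in client.packet_captures.list(
#         resource_group_name="example-rg",
#         network_watcher_name="example-watcher",
#     ):
#         print(capture.name)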
class PublicIPAddressesOperations:
"""PublicIPAddressesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
public_ip_address_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_public_ip_addresses_delete_request_initial(
resource_group_name=resource_group_name,
public_ip_address_name=public_ip_address_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
public_ip_address_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified public IP address.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
public_ip_address_name=public_ip_address_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}"} # type: ignore
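# Hedged sketch (editorial) of the continuation_token flow documented above:
# a poller's state can be persisted and the LRO resumed later without
# re-issuing the initial request:
#
#     poller = await client.public_ip_addresses.begin_delete(
#         resource_group_name="example-rg",
#         public_ip_address_name="example-ip",
#     )
#     token = poller.continuation_token()
#     # ...persist `token`, then later:
#     resumed = await client.public_ip_addresses.begin_delete(
#         resource_group_name="example-rg",
#         public_ip_address_name="example-ip",
#         continuation_token=token,
#     )
#     await resumed.wait()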
@distributed_trace_async
async def get(
self,
resource_group_name: str,
public_ip_address_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.PublicIPAddress":
"""Gets the specified public IP address in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:param expand: Expands referenced resources. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PublicIPAddress, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.PublicIPAddress
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_public_ip_addresses_get_request(
resource_group_name=resource_group_name,
public_ip_address_name=public_ip_address_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
expand=expand,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}"} # type: ignore
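# Hedged sketch (editorial) of the `cls` hook mentioned in every docstring:
# per the code above it is called with the pipeline response, the deserialized
# model, and the response headers, and its return value replaces the result:
#
#     def with_status(pipeline_response, deserialized, headers):
#         return deserialized, pipeline_response.http_response.status_code
#
#     ip, status = await client.public_ip_addresses.get(
#         resource_group_name="example-rg",
#         public_ip_address_name="example-ip",
#         cls=with_status,
#     )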
async def _create_or_update_initial(
self,
resource_group_name: str,
public_ip_address_name: str,
parameters: "_models.PublicIPAddress",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.PublicIPAddress":
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'PublicIPAddress')
request = build_public_ip_addresses_create_or_update_request_initial(
resource_group_name=resource_group_name,
public_ip_address_name=public_ip_address_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
public_ip_address_name: str,
parameters: "_models.PublicIPAddress",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.PublicIPAddress"]:
"""Creates or updates a static or dynamic public IP address.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:param parameters: Parameters supplied to the create or update public IP address operation.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.PublicIPAddress
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PublicIPAddress or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.PublicIPAddress]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
public_ip_address_name=public_ip_address_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}"} # type: ignore
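# Hedged usage sketch (editorial): `parameters` is a full PublicIPAddress
# model; the field values below are assumptions for illustration:
#
#     from azure.mgmt.network.v2016_12_01.models import PublicIPAddress
#
#     poller = await client.public_ip_addresses.begin_create_or_update(
#         resource_group_name="example-rg",
#         public_ip_address_name="example-ip",
#         parameters=PublicIPAddress(
#             location="eastus",
#             public_ip_allocation_method="Static",
#         ),
#     )
#     created = await poller.result()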
@distributed_trace
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.PublicIPAddressListResult"]:
"""Gets all the public IP addresses in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PublicIPAddressListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.PublicIPAddressListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddressListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_public_ip_addresses_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_all.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_public_ip_addresses_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("PublicIPAddressListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/publicIPAddresses"} # type: ignore
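# Hedged usage sketch (editorial): list_all pages across the entire
# subscription and therefore takes no scoping parameters:
#
#     async for ip in client.public_ip_addresses.list_all():
#         print(ip.name, ip.ip_address)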
@distributed_trace
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.PublicIPAddressListResult"]:
"""Gets all public IP addresses in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PublicIPAddressListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.PublicIPAddressListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddressListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_public_ip_addresses_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_public_ip_addresses_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("PublicIPAddressListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses"} # type: ignore
class RouteFiltersOperations:
"""RouteFiltersOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
route_filter_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_route_filters_delete_request_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
route_filter_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}"} # type: ignore
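# Hedged sketch (editorial) of the `polling` keyword described above: passing
# polling=False selects AsyncNoPolling, so the call returns as soon as the
# initial DELETE is accepted instead of tracking the LRO to completion:
#
#     poller = await client.route_filters.begin_delete(
#         resource_group_name="example-rg",
#         route_filter_name="example-filter",
#         polling=False,
#     )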
@distributed_trace_async
async def get(
self,
resource_group_name: str,
route_filter_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.RouteFilter":
"""Gets the specified route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param expand: Expands referenced express route BGP peering resources. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RouteFilter, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.RouteFilter
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilter"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_route_filters_get_request(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
expand=expand,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RouteFilter', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}"} # type: ignore
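# Hedged usage sketch (editorial): the optional `expand` parameter asks the
# service to inline referenced resources; "peerings" below is an assumed
# value for illustration:
#
#     rf = await client.route_filters.get(
#         resource_group_name="example-rg",
#         route_filter_name="example-filter",
#         expand="peerings",
#     )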
async def _create_or_update_initial(
self,
resource_group_name: str,
route_filter_name: str,
route_filter_parameters: "_models.RouteFilter",
**kwargs: Any
) -> "_models.RouteFilter":
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilter"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(route_filter_parameters, 'RouteFilter')
request = build_route_filters_create_or_update_request_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('RouteFilter', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('RouteFilter', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
route_filter_name: str,
route_filter_parameters: "_models.RouteFilter",
**kwargs: Any
) -> AsyncLROPoller["_models.RouteFilter"]:
"""Creates or updates a route filter in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param route_filter_parameters: Parameters supplied to the create or update route filter
operation.
:type route_filter_parameters: ~azure.mgmt.network.v2016_12_01.models.RouteFilter
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RouteFilter or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.RouteFilter]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilter"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
route_filter_parameters=route_filter_parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('RouteFilter', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}"} # type: ignore
async def _update_initial(
self,
resource_group_name: str,
route_filter_name: str,
route_filter_parameters: "_models.PatchRouteFilter",
**kwargs: Any
) -> "_models.RouteFilter":
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilter"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(route_filter_parameters, 'PatchRouteFilter')
request = build_route_filters_update_request_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RouteFilter', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}"} # type: ignore
@distributed_trace_async
async def begin_update(
self,
resource_group_name: str,
route_filter_name: str,
route_filter_parameters: "_models.PatchRouteFilter",
**kwargs: Any
) -> AsyncLROPoller["_models.RouteFilter"]:
"""Updates a route filter in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param route_filter_parameters: Parameters supplied to the update route filter operation.
:type route_filter_parameters: ~azure.mgmt.network.v2016_12_01.models.PatchRouteFilter
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RouteFilter or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.RouteFilter]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilter"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
route_filter_parameters=route_filter_parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('RouteFilter', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}"} # type: ignore
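# Hedged usage sketch (editorial): begin_update takes the PATCH-style
# PatchRouteFilter model rather than a full RouteFilter, so only the supplied
# fields change; the tags below are assumptions:
#
#     from azure.mgmt.network.v2016_12_01.models import PatchRouteFilter
#
#     poller = await client.route_filters.begin_update(
#         resource_group_name="example-rg",
#         route_filter_name="example-filter",
#         route_filter_parameters=PatchRouteFilter(tags={"env": "test"}),
#     )
#     updated = await poller.result()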
@distributed_trace
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.RouteFilterListResult"]:
"""Gets all route filters in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteFilterListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.RouteFilterListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_route_filters_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_by_resource_group.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_route_filters_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("RouteFilterListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters"} # type: ignore
@distributed_trace
def list(
self,
**kwargs: Any
) -> AsyncIterable["_models.RouteFilterListResult"]:
"""Gets all route filters in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteFilterListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.RouteFilterListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_route_filters_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_route_filters_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("RouteFilterListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/routeFilters"} # type: ignore
class RouteFilterRulesOperations:
"""RouteFilterRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_route_filter_rules_delete_request_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
rule_name=rule_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified rule from a route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param rule_name: The name of the rule.
:type rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
rule_name=rule_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}"} # type: ignore
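    # --- Usage sketch (editor's addition, not generated code) ---
    # A minimal example of driving this long-running delete, assuming an
    # authenticated async NetworkManagementClient named `network_client` and
    # the hypothetical resource names shown below:
    #
    #     poller = await network_client.route_filter_rules.begin_delete(
    #         resource_group_name="example-rg",
    #         route_filter_name="example-filter",
    #         rule_name="example-rule",
    #     )
    #     await poller.result()  # block until the delete completes
    #
    # A token saved via poller.continuation_token() can later be passed back
    # as continuation_token=... to resume polling from a saved state.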
@distributed_trace_async
async def get(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
**kwargs: Any
) -> "_models.RouteFilterRule":
"""Gets the specified rule from a route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param rule_name: The name of the rule.
:type rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RouteFilterRule, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.RouteFilterRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_route_filter_rules_get_request(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
rule_name=rule_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}"} # type: ignore
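    # Usage sketch (editor's addition): fetching a single rule, assuming the
    # same hypothetical `network_client` and resource names as above:
    #
    #     rule = await network_client.route_filter_rules.get(
    #         resource_group_name="example-rg",
    #         route_filter_name="example-filter",
    #         rule_name="example-rule",
    #     )
    #     print(rule.name)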
async def _create_or_update_initial(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
route_filter_rule_parameters: "_models.RouteFilterRule",
**kwargs: Any
) -> "_models.RouteFilterRule":
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(route_filter_rule_parameters, 'RouteFilterRule')
request = build_route_filter_rules_create_or_update_request_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
rule_name=rule_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
route_filter_rule_parameters: "_models.RouteFilterRule",
**kwargs: Any
) -> AsyncLROPoller["_models.RouteFilterRule"]:
"""Creates or updates a route in the specified route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param rule_name: The name of the route filter rule.
:type rule_name: str
:param route_filter_rule_parameters: Parameters supplied to the create or update route filter
rule operation.
:type route_filter_rule_parameters: ~azure.mgmt.network.v2016_12_01.models.RouteFilterRule
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RouteFilterRule or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.RouteFilterRule]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
rule_name=rule_name,
route_filter_rule_parameters=route_filter_rule_parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}"} # type: ignore
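    # Usage sketch (editor's addition): creating a rule. The payload below is
    # a plausible RouteFilterRule for this API version; the community value
    # and all resource names are illustrative only:
    #
    #     from azure.mgmt.network.v2016_12_01 import models
    #
    #     rule_params = models.RouteFilterRule(
    #         access="Allow",
    #         route_filter_rule_type="Community",
    #         communities=["12076:5010"],  # hypothetical BGP community
    #     )
    #     poller = await network_client.route_filter_rules.begin_create_or_update(
    #         resource_group_name="example-rg",
    #         route_filter_name="example-filter",
    #         rule_name="example-rule",
    #         route_filter_rule_parameters=rule_params,
    #     )
    #     rule = await poller.result()  # the created/updated RouteFilterRule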
async def _update_initial(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
route_filter_rule_parameters: "_models.PatchRouteFilterRule",
**kwargs: Any
) -> "_models.RouteFilterRule":
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(route_filter_rule_parameters, 'PatchRouteFilterRule')
request = build_route_filter_rules_update_request_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
rule_name=rule_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}"} # type: ignore
@distributed_trace_async
async def begin_update(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
route_filter_rule_parameters: "_models.PatchRouteFilterRule",
**kwargs: Any
) -> AsyncLROPoller["_models.RouteFilterRule"]:
"""Updates a route in the specified route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param rule_name: The name of the route filter rule.
:type rule_name: str
:param route_filter_rule_parameters: Parameters supplied to the update route filter rule
operation.
:type route_filter_rule_parameters: ~azure.mgmt.network.v2016_12_01.models.PatchRouteFilterRule
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RouteFilterRule or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.RouteFilterRule]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
rule_name=rule_name,
route_filter_rule_parameters=route_filter_rule_parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}"} # type: ignore
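    # Usage sketch (editor's addition): a PATCH-style update that sends only
    # the fields to change via PatchRouteFilterRule (names hypothetical):
    #
    #     patch = models.PatchRouteFilterRule(access="Deny")
    #     poller = await network_client.route_filter_rules.begin_update(
    #         resource_group_name="example-rg",
    #         route_filter_name="example-filter",
    #         rule_name="example-rule",
    #         route_filter_rule_parameters=patch,
    #     )
    #     updated = await poller.result()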
@distributed_trace
def list_by_route_filter(
self,
resource_group_name: str,
route_filter_name: str,
**kwargs: Any
) -> AsyncIterable["_models.RouteFilterRuleListResult"]:
"""Gets all RouteFilterRules in a route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteFilterRuleListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.RouteFilterRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_route_filter_rules_list_by_route_filter_request(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_by_route_filter.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_route_filter_rules_list_by_route_filter_request(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("RouteFilterRuleListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_route_filter.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules"} # type: ignore
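    # Usage sketch (editor's addition): the return value is an AsyncItemPaged,
    # so it is consumed with `async for`; page requests are issued lazily as
    # iteration advances (resource names hypothetical):
    #
    #     async for rule in network_client.route_filter_rules.list_by_route_filter(
    #         resource_group_name="example-rg",
    #         route_filter_name="example-filter",
    #     ):
    #         print(rule.name)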
class RouteTablesOperations:
"""RouteTablesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
route_table_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_route_tables_delete_request_initial(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
route_table_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified route table.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_table_name: The name of the route table.
:type route_table_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}"} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
route_table_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.RouteTable":
"""Gets the specified route table.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_table_name: The name of the route table.
:type route_table_name: str
:param expand: Expands referenced resources. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RouteTable, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.RouteTable
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTable"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_route_tables_get_request(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
expand=expand,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RouteTable', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}"} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
route_table_name: str,
parameters: "_models.RouteTable",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.RouteTable":
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTable"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'RouteTable')
request = build_route_tables_create_or_update_request_initial(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('RouteTable', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('RouteTable', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
route_table_name: str,
parameters: "_models.RouteTable",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.RouteTable"]:
"""Create or updates a route table in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_table_name: The name of the route table.
:type route_table_name: str
:param parameters: Parameters supplied to the create or update route table operation.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.RouteTable
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RouteTable or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.RouteTable]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTable"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('RouteTable', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}"} # type: ignore
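    # Usage sketch (editor's addition): creating an empty route table; the
    # resource names and region are hypothetical:
    #
    #     table_poller = await network_client.route_tables.begin_create_or_update(
    #         resource_group_name="example-rg",
    #         route_table_name="example-rt",
    #         parameters=models.RouteTable(location="westus"),
    #     )
    #     route_table = await table_poller.result()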
@distributed_trace
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.RouteTableListResult"]:
"""Gets all route tables in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteTableListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.RouteTableListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTableListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_route_tables_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_route_tables_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("RouteTableListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables"} # type: ignore
@distributed_trace
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.RouteTableListResult"]:
"""Gets all route tables in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteTableListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.RouteTableListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteTableListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_route_tables_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_all.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_route_tables_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("RouteTableListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/routeTables"} # type: ignore
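    # Usage sketch (editor's addition): subscription-wide listing; collecting
    # the pager into a list forces every page to be fetched:
    #
    #     all_tables = [rt async for rt in network_client.route_tables.list_all()]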
class RoutesOperations:
"""RoutesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
route_table_name: str,
route_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_routes_delete_request_initial(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
route_name=route_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}/routes/{routeName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
route_table_name: str,
route_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified route from a route table.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_table_name: The name of the route table.
:type route_table_name: str
:param route_name: The name of the route.
:type route_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
route_name=route_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}/routes/{routeName}"} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
route_table_name: str,
route_name: str,
**kwargs: Any
) -> "_models.Route":
"""Gets the specified route from a route table.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_table_name: The name of the route table.
:type route_table_name: str
:param route_name: The name of the route.
:type route_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Route, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.Route
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Route"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_routes_get_request(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
route_name=route_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Route', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}/routes/{routeName}"} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
route_table_name: str,
route_name: str,
route_parameters: "_models.Route",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.Route":
cls = kwargs.pop('cls', None) # type: ClsType["_models.Route"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(route_parameters, 'Route')
request = build_routes_create_or_update_request_initial(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
route_name=route_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Route', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('Route', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}/routes/{routeName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
route_table_name: str,
route_name: str,
route_parameters: "_models.Route",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.Route"]:
"""Creates or updates a route in the specified route table.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_table_name: The name of the route table.
:type route_table_name: str
:param route_name: The name of the route.
:type route_name: str
:param route_parameters: Parameters supplied to the create or update route operation.
:type route_parameters: ~azure.mgmt.network.v2016_12_01.models.Route
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Route or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.Route]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Route"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
route_name=route_name,
route_parameters=route_parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('Route', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}/routes/{routeName}"} # type: ignore
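    # Usage sketch (editor's addition): adding a user-defined route that sends
    # a prefix to a virtual appliance; all addresses and names here are
    # illustrative only:
    #
    #     route = models.Route(
    #         address_prefix="10.1.0.0/16",
    #         next_hop_type="VirtualAppliance",
    #         next_hop_ip_address="10.0.0.4",
    #     )
    #     poller = await network_client.routes.begin_create_or_update(
    #         resource_group_name="example-rg",
    #         route_table_name="example-rt",
    #         route_name="to-appliance",
    #         route_parameters=route,
    #     )
    #     created = await poller.result()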
@distributed_trace
def list(
self,
resource_group_name: str,
route_table_name: str,
**kwargs: Any
) -> AsyncIterable["_models.RouteListResult"]:
"""Gets all routes in a route table.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_table_name: The name of the route table.
:type route_table_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteListResult or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.RouteListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_routes_list_request(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_routes_list_request(
resource_group_name=resource_group_name,
route_table_name=route_table_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("RouteListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeTables/{routeTableName}/routes"} # type: ignore
class BgpServiceCommunitiesOperations:
"""BgpServiceCommunitiesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
**kwargs: Any
) -> AsyncIterable["_models.BgpServiceCommunityListResult"]:
"""Gets all the available bgp service communities.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either BgpServiceCommunityListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.BgpServiceCommunityListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.BgpServiceCommunityListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_bgp_service_communities_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_bgp_service_communities_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("BgpServiceCommunityListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/bgpServiceCommunities"} # type: ignore
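    # Usage sketch (editor's addition): each BgpServiceCommunity carries the
    # service name and its BGP community values (attribute names assumed from
    # this API version's models):
    #
    #     async for community in network_client.bgp_service_communities.list():
    #         print(community.service_name,
    #               [c.community_value for c in community.bgp_communities])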
class UsagesOperations:
"""UsagesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
location: str,
**kwargs: Any
) -> AsyncIterable["_models.UsagesListResult"]:
"""Lists compute usages for a subscription.
:param location: The location where resource usage is queried.
:type location: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either UsagesListResult or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.UsagesListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.UsagesListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_usages_list_request(
location=location,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_usages_list_request(
location=location,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("UsagesListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/usages"} # type: ignore
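    # Usage sketch (editor's addition): usages are scoped to a location string
    # such as "westus" (hypothetical here); each Usage reports a current value
    # against a limit:
    #
    #     async for usage in network_client.usages.list(location="westus"):
    #         print(usage.name.value, usage.current_value, "/", usage.limit)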
class VirtualNetworksOperations:
"""VirtualNetworksOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
virtual_network_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_networks_delete_request_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
virtual_network_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}"} # type: ignore
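    # Usage sketch (illustrative only, not part of the generated module; assumes
    # an authenticated async NetworkManagementClient named `client` for this API
    # version and hypothetical resource names, run inside an async function):
    #
    #     poller = await client.virtual_networks.begin_delete(
    #         resource_group_name="example-rg",
    #         virtual_network_name="example-vnet",
    #     )
    #     await poller.result()  # resolves to None once the LRO completes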
@distributed_trace_async
async def get(
self,
resource_group_name: str,
virtual_network_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.VirtualNetwork":
"""Gets the specified virtual network by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param expand: Expands referenced resources. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetwork, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.VirtualNetwork
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetwork"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_networks_get_request(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
expand=expand,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetwork', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}"} # type: ignore
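    # Usage sketch (illustrative; `client` and the ``$expand`` value below are
    # assumptions, not taken from this module):
    #
    #     vnet = await client.virtual_networks.get(
    #         resource_group_name="example-rg",
    #         virtual_network_name="example-vnet",
    #         expand="subnets",  # hypothetical $expand expression
    #     )
    #     print(vnet.location, [s.name for s in vnet.subnets or []])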
async def _create_or_update_initial(
self,
resource_group_name: str,
virtual_network_name: str,
parameters: "_models.VirtualNetwork",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.VirtualNetwork":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetwork"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'VirtualNetwork')
request = build_virtual_networks_create_or_update_request_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetwork', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetwork', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
virtual_network_name: str,
parameters: "_models.VirtualNetwork",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.VirtualNetwork"]:
"""Creates or updates a virtual network in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param parameters: Parameters supplied to the create or update virtual network operation.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.VirtualNetwork
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualNetwork or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.VirtualNetwork]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetwork"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('VirtualNetwork', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}"} # type: ignore
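    # Usage sketch (illustrative; model field values are hypothetical). The
    # poller's result() deserializes the final response into a VirtualNetwork:
    #
    #     from azure.mgmt.network.v2016_12_01.models import (
    #         AddressSpace, VirtualNetwork,
    #     )
    #     poller = await client.virtual_networks.begin_create_or_update(
    #         resource_group_name="example-rg",
    #         virtual_network_name="example-vnet",
    #         parameters=VirtualNetwork(
    #             location="eastus",
    #             address_space=AddressSpace(address_prefixes=["10.0.0.0/16"]),
    #         ),
    #     )
    #     created = await poller.result()  # _models.VirtualNetwork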
@distributed_trace
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.VirtualNetworkListResult"]:
"""Gets all virtual networks in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.VirtualNetworkListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_virtual_networks_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_all.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_virtual_networks_list_all_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("VirtualNetworkListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.Network/virtualNetworks"} # type: ignore
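    # Usage sketch (illustrative): the returned AsyncItemPaged follows
    # next_link transparently, so callers iterate items rather than pages:
    #
    #     async for vnet in client.virtual_networks.list_all():
    #         print(vnet.name)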
@distributed_trace
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.VirtualNetworkListResult"]:
"""Gets all virtual networks in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.VirtualNetworkListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_virtual_networks_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_virtual_networks_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("VirtualNetworkListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks"} # type: ignore
@distributed_trace_async
async def check_ip_address_availability(
self,
resource_group_name: str,
virtual_network_name: str,
ip_address: Optional[str] = None,
**kwargs: Any
) -> "_models.IPAddressAvailabilityResult":
"""Checks whether a private IP address is available for use.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param ip_address: The private IP address to be verified. Default value is None.
:type ip_address: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IPAddressAvailabilityResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.IPAddressAvailabilityResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.IPAddressAvailabilityResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_networks_check_ip_address_availability_request(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
ip_address=ip_address,
template_url=self.check_ip_address_availability.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('IPAddressAvailabilityResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
check_ip_address_availability.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/CheckIPAddressAvailability"} # type: ignore
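    # Usage sketch (illustrative; the address is hypothetical). When the
    # address is already taken, the result lists alternative free addresses:
    #
    #     result = await client.virtual_networks.check_ip_address_availability(
    #         resource_group_name="example-rg",
    #         virtual_network_name="example-vnet",
    #         ip_address="10.0.0.4",
    #     )
    #     print(result.available, result.available_ip_addresses)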
class SubnetsOperations:
"""SubnetsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_subnets_delete_request_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified subnet.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}"} # type: ignore
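    # Usage sketch (illustrative) for resuming this LRO from a saved state via
    # the `continuation_token` keyword handled above; names are hypothetical:
    #
    #     poller = await client.subnets.begin_delete(
    #         resource_group_name="example-rg",
    #         virtual_network_name="example-vnet",
    #         subnet_name="example-subnet",
    #     )
    #     token = poller.continuation_token()  # persist somewhere durable
    #     # ...process restarts...
    #     resumed = await client.subnets.begin_delete(
    #         resource_group_name="example-rg",
    #         virtual_network_name="example-vnet",
    #         subnet_name="example-subnet",
    #         continuation_token=token,  # skips the initial DELETE request
    #     )
    #     await resumed.result()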
@distributed_trace_async
async def get(
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.Subnet":
"""Gets the specified subnet by virtual network and resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param expand: Expands referenced resources. Default value is None.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Subnet, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.Subnet
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Subnet"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_subnets_get_request(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
expand=expand,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Subnet', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}"} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
subnet_parameters: "_models.Subnet",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.Subnet":
cls = kwargs.pop('cls', None) # type: ClsType["_models.Subnet"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(subnet_parameters, 'Subnet')
request = build_subnets_create_or_update_request_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Subnet', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('Subnet', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
subnet_parameters: "_models.Subnet",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.Subnet"]:
"""Creates or updates a subnet in the specified virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param subnet_parameters: Parameters supplied to the create or update subnet operation.
:type subnet_parameters: ~azure.mgmt.network.v2016_12_01.models.Subnet
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Subnet or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.Subnet]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Subnet"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
subnet_parameters=subnet_parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('Subnet', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}"} # type: ignore
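    # Usage sketch (illustrative; the address prefix is hypothetical):
    #
    #     from azure.mgmt.network.v2016_12_01.models import Subnet
    #     poller = await client.subnets.begin_create_or_update(
    #         resource_group_name="example-rg",
    #         virtual_network_name="example-vnet",
    #         subnet_name="example-subnet",
    #         subnet_parameters=Subnet(address_prefix="10.0.1.0/24"),
    #     )
    #     subnet = await poller.result()  # _models.Subnet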
@distributed_trace
def list(
self,
resource_group_name: str,
virtual_network_name: str,
**kwargs: Any
) -> AsyncIterable["_models.SubnetListResult"]:
"""Gets all subnets in a virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SubnetListResult or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.SubnetListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.SubnetListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_subnets_list_request(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_subnets_list_request(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("SubnetListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets"} # type: ignore
class VirtualNetworkPeeringsOperations:
"""VirtualNetworkPeeringsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
virtual_network_name: str,
virtual_network_peering_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_network_peerings_delete_request_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
virtual_network_name: str,
virtual_network_peering_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified virtual network peering.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the virtual network peering.
:type virtual_network_peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}"} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
virtual_network_name: str,
virtual_network_peering_name: str,
**kwargs: Any
) -> "_models.VirtualNetworkPeering":
"""Gets the specified virtual network peering.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the virtual network peering.
:type virtual_network_peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkPeering, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.VirtualNetworkPeering
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_network_peerings_get_request(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}"} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
virtual_network_name: str,
virtual_network_peering_name: str,
virtual_network_peering_parameters: "_models.VirtualNetworkPeering",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.VirtualNetworkPeering":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(virtual_network_peering_parameters, 'VirtualNetworkPeering')
request = build_virtual_network_peerings_create_or_update_request_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
virtual_network_name: str,
virtual_network_peering_name: str,
virtual_network_peering_parameters: "_models.VirtualNetworkPeering",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.VirtualNetworkPeering"]:
"""Creates or updates a peering in the specified virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the peering.
:type virtual_network_peering_name: str
:param virtual_network_peering_parameters: Parameters supplied to the create or update virtual
network peering operation.
:type virtual_network_peering_parameters:
~azure.mgmt.network.v2016_12_01.models.VirtualNetworkPeering
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualNetworkPeering or the result
of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.VirtualNetworkPeering]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
virtual_network_peering_parameters=virtual_network_peering_parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}"} # type: ignore
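    # Usage sketch (illustrative; `remote_vnet_id` is a hypothetical ARM
    # resource ID of the peer virtual network):
    #
    #     from azure.mgmt.network.v2016_12_01.models import (
    #         SubResource, VirtualNetworkPeering,
    #     )
    #     poller = await client.virtual_network_peerings.begin_create_or_update(
    #         resource_group_name="example-rg",
    #         virtual_network_name="example-vnet",
    #         virtual_network_peering_name="example-peering",
    #         virtual_network_peering_parameters=VirtualNetworkPeering(
    #             remote_virtual_network=SubResource(id=remote_vnet_id),
    #             allow_virtual_network_access=True,
    #         ),
    #     )
    #     await poller.result()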
@distributed_trace
def list(
self,
resource_group_name: str,
virtual_network_name: str,
**kwargs: Any
) -> AsyncIterable["_models.VirtualNetworkPeeringListResult"]:
"""Gets all virtual network peerings in a virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkPeeringListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.VirtualNetworkPeeringListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeeringListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_virtual_network_peerings_list_request(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_virtual_network_peerings_list_request(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("VirtualNetworkPeeringListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings"} # type: ignore
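    # Usage sketch (illustrative): besides flat `async for` iteration, the
    # pager exposes page-level access through azure-core's by_page():
    #
    #     pager = client.virtual_network_peerings.list(
    #         resource_group_name="example-rg",
    #         virtual_network_name="example-vnet",
    #     )
    #     async for page in pager.by_page():
    #         async for peering in page:
    #             print(peering.name, peering.peering_state)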
class VirtualNetworkGatewaysOperations:
"""VirtualNetworkGatewaysOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _create_or_update_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.VirtualNetworkGateway",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.VirtualNetworkGateway":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'VirtualNetworkGateway')
request = build_virtual_network_gateways_create_or_update_request_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.VirtualNetworkGateway",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.VirtualNetworkGateway"]:
"""Creates or updates a virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
        :param parameters: Parameters supplied to the create or update virtual network gateway operation.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.VirtualNetworkGateway
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualNetworkGateway or the result
of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.VirtualNetworkGateway]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}"} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs: Any
) -> "_models.VirtualNetworkGateway":
"""Gets the specified virtual network gateway by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.VirtualNetworkGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_network_gateways_get_request(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}"} # type: ignore
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_network_gateways_delete_request_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified virtual network gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}"} # type: ignore
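    # Usage sketch (assumption: ``client`` is an authenticated
    # ``NetworkManagementClient``; resource names are hypothetical). The
    # coroutine returns an ``AsyncLROPoller``; awaiting ``result()`` blocks
    # until the service finishes the delete:
    #
    #     poller = await client.virtual_network_gateways.begin_delete(
    #         resource_group_name="example-rg",
    #         virtual_network_gateway_name="example-gw",
    #     )
    #     await poller.result()  # returns None once deletion completes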
@distributed_trace
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.VirtualNetworkGatewayListResult"]:
"""Gets all virtual network gateways by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkGatewayListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.VirtualNetworkGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_virtual_network_gateways_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_virtual_network_gateways_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("VirtualNetworkGatewayListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways"} # type: ignore
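    # Usage sketch (assumption: ``client`` as above). ``list`` returns an
    # ``AsyncItemPaged`` that transparently follows ``next_link``, so callers
    # simply iterate:
    #
    #     async for gateway in client.virtual_network_gateways.list("example-rg"):
    #         print(gateway.name, gateway.gateway_type)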
async def _reset_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
gateway_vip: Optional[str] = None,
**kwargs: Any
) -> Optional["_models.VirtualNetworkGateway"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VirtualNetworkGateway"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_network_gateways_reset_request_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
gateway_vip=gateway_vip,
template_url=self._reset_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reset_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset"} # type: ignore
@distributed_trace_async
async def begin_reset(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
gateway_vip: Optional[str] = None,
**kwargs: Any
) -> AsyncLROPoller["_models.VirtualNetworkGateway"]:
"""Resets the primary of the virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
        :param gateway_vip: Virtual network gateway vip address supplied to the begin reset of an
         active-active feature-enabled gateway. Default value is None.
:type gateway_vip: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualNetworkGateway or the result
of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.VirtualNetworkGateway]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._reset_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
gateway_vip=gateway_vip,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset"} # type: ignore
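    # Usage sketch (assumption: ``client`` as above; ``gateway_vip`` only
    # applies to active-active gateways and is left at its default here):
    #
    #     poller = await client.virtual_network_gateways.begin_reset(
    #         resource_group_name="example-rg",
    #         virtual_network_gateway_name="example-gw",
    #     )
    #     gateway = await poller.result()  # the gateway after the reset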
async def _generatevpnclientpackage_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.VpnClientParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> str:
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'VpnClientParameters')
request = build_virtual_network_gateways_generatevpnclientpackage_request_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._generatevpnclientpackage_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_generatevpnclientpackage_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage"} # type: ignore
@distributed_trace_async
async def begin_generatevpnclientpackage(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
parameters: "_models.VpnClientParameters",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller[str]:
"""Generates VPN client package for P2S client of the virtual network gateway in the specified
resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to the generate virtual network gateway VPN client
package operation.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.VpnClientParameters
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._generatevpnclientpackage_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generatevpnclientpackage.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage"} # type: ignore
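    # Usage sketch (assumption: ``client`` as above; ``VpnClientParameters``
    # comes from this package's models). The poller resolves to a plain ``str``
    # containing the package download URL:
    #
    #     from azure.mgmt.network.v2016_12_01.models import VpnClientParameters
    #
    #     params = VpnClientParameters(processor_architecture="Amd64")
    #     poller = await client.virtual_network_gateways.begin_generatevpnclientpackage(
    #         resource_group_name="example-rg",
    #         virtual_network_gateway_name="example-gw",
    #         parameters=params,
    #     )
    #     package_url = await poller.result()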
async def _get_bgp_peer_status_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
peer: Optional[str] = None,
**kwargs: Any
) -> Optional["_models.BgpPeerStatusListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.BgpPeerStatusListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_network_gateways_get_bgp_peer_status_request_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
peer=peer,
template_url=self._get_bgp_peer_status_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_bgp_peer_status_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus"} # type: ignore
@distributed_trace_async
async def begin_get_bgp_peer_status(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
peer: Optional[str] = None,
**kwargs: Any
) -> AsyncLROPoller["_models.BgpPeerStatusListResult"]:
"""The GetBgpPeerStatus operation retrieves the status of all BGP peers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param peer: The IP address of the peer to retrieve the status of. Default value is None.
:type peer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either BgpPeerStatusListResult or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.BgpPeerStatusListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.BgpPeerStatusListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_bgp_peer_status_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
peer=peer,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_bgp_peer_status.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus"} # type: ignore
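    # Usage sketch (assumption: ``client`` as above). Omitting ``peer``
    # queries the status of every BGP peer of the gateway:
    #
    #     poller = await client.virtual_network_gateways.begin_get_bgp_peer_status(
    #         resource_group_name="example-rg",
    #         virtual_network_gateway_name="example-gw",
    #     )
    #     status = await poller.result()
    #     for peer_status in status.value or []:
    #         print(peer_status.neighbor, peer_status.state)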
async def _get_learned_routes_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs: Any
) -> Optional["_models.GatewayRouteListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.GatewayRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_network_gateways_get_learned_routes_request_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._get_learned_routes_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_learned_routes_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes"} # type: ignore
@distributed_trace_async
async def begin_get_learned_routes(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
**kwargs: Any
) -> AsyncLROPoller["_models.GatewayRouteListResult"]:
"""This operation retrieves a list of routes the virtual network gateway has learned, including
routes learned from BGP peers.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either GatewayRouteListResult or the result
of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.GatewayRouteListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GatewayRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_learned_routes_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_learned_routes.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes"} # type: ignore
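    # Usage sketch (assumption: ``client`` as above). The poller resolves to a
    # ``GatewayRouteListResult`` whose ``value`` holds the learned routes:
    #
    #     poller = await client.virtual_network_gateways.begin_get_learned_routes(
    #         resource_group_name="example-rg",
    #         virtual_network_gateway_name="example-gw",
    #     )
    #     routes = await poller.result()
    #     for route in routes.value or []:
    #         print(route.network, route.next_hop, route.origin)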
async def _get_advertised_routes_initial(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
peer: str,
**kwargs: Any
) -> Optional["_models.GatewayRouteListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.GatewayRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_network_gateways_get_advertised_routes_request_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
peer=peer,
template_url=self._get_advertised_routes_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_advertised_routes_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes"} # type: ignore
@distributed_trace_async
async def begin_get_advertised_routes(
self,
resource_group_name: str,
virtual_network_gateway_name: str,
peer: str,
**kwargs: Any
) -> AsyncLROPoller["_models.GatewayRouteListResult"]:
"""This operation retrieves a list of routes the virtual network gateway is advertising to the
specified peer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param peer: The IP address of the peer.
:type peer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either GatewayRouteListResult or the result
of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.GatewayRouteListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.GatewayRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_advertised_routes_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
peer=peer,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_advertised_routes.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes"} # type: ignore
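    # Usage sketch (assumption: ``client`` as above; the peer address is
    # hypothetical). Unlike ``begin_get_bgp_peer_status``, ``peer`` is
    # required for this operation:
    #
    #     poller = await client.virtual_network_gateways.begin_get_advertised_routes(
    #         resource_group_name="example-rg",
    #         virtual_network_gateway_name="example-gw",
    #         peer="10.0.0.4",
    #     )
    #     routes = await poller.result()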
class VirtualNetworkGatewayConnectionsOperations:
"""VirtualNetworkGatewayConnectionsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _create_or_update_initial(
self,
resource_group_name: str,
virtual_network_gateway_connection_name: str,
parameters: "_models.VirtualNetworkGatewayConnection",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.VirtualNetworkGatewayConnection":
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGatewayConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'VirtualNetworkGatewayConnection')
request = build_virtual_network_gateway_connections_create_or_update_request_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGatewayConnection', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkGatewayConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
virtual_network_gateway_connection_name: str,
parameters: "_models.VirtualNetworkGatewayConnection",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.VirtualNetworkGatewayConnection"]:
"""Creates or updates a virtual network gateway connection in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The name of the virtual network gateway
connection.
:type virtual_network_gateway_connection_name: str
:param parameters: Parameters supplied to the create or update virtual network gateway
connection operation.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.VirtualNetworkGatewayConnection
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VirtualNetworkGatewayConnection or
the result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.VirtualNetworkGatewayConnection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGatewayConnection"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('VirtualNetworkGatewayConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}"} # type: ignore
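    # Usage sketch (assumption: ``client`` as above; the connection model is
    # only partially populated and ``vnet_gateway``/``local_gateway`` are
    # hypothetical, previously fetched gateway models):
    #
    #     from azure.mgmt.network.v2016_12_01.models import VirtualNetworkGatewayConnection
    #
    #     connection = VirtualNetworkGatewayConnection(
    #         location="eastus",
    #         virtual_network_gateway1=vnet_gateway,   # a VirtualNetworkGateway
    #         local_network_gateway2=local_gateway,    # a LocalNetworkGateway
    #         connection_type="IPsec",
    #         shared_key="example-shared-key",
    #     )
    #     poller = await client.virtual_network_gateway_connections.begin_create_or_update(
    #         resource_group_name="example-rg",
    #         virtual_network_gateway_connection_name="example-conn",
    #         parameters=connection,
    #     )
    #     result = await poller.result()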
@distributed_trace_async
async def get(
self,
resource_group_name: str,
virtual_network_gateway_connection_name: str,
**kwargs: Any
) -> "_models.VirtualNetworkGatewayConnection":
"""Gets the specified virtual network gateway connection by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The name of the virtual network gateway
connection.
:type virtual_network_gateway_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkGatewayConnection, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.VirtualNetworkGatewayConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGatewayConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_network_gateway_connections_get_request(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkGatewayConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}"} # type: ignore
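    # Usage sketch (assumption: ``client`` as above):
    #
    #     connection = await client.virtual_network_gateway_connections.get(
    #         resource_group_name="example-rg",
    #         virtual_network_gateway_connection_name="example-conn",
    #     )
    #     print(connection.connection_status)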
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
virtual_network_gateway_connection_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_network_gateway_connections_delete_request_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
virtual_network_gateway_connection_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified virtual network Gateway connection.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The name of the virtual network gateway
connection.
:type virtual_network_gateway_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}"} # type: ignore
async def _set_shared_key_initial(
self,
resource_group_name: str,
virtual_network_gateway_connection_name: str,
parameters: "_models.ConnectionSharedKey",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.ConnectionSharedKey":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionSharedKey"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'ConnectionSharedKey')
request = build_virtual_network_gateway_connections_set_shared_key_request_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._set_shared_key_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ConnectionSharedKey', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ConnectionSharedKey', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_set_shared_key_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/sharedkey"} # type: ignore
@distributed_trace_async
async def begin_set_shared_key(
self,
resource_group_name: str,
virtual_network_gateway_connection_name: str,
parameters: "_models.ConnectionSharedKey",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.ConnectionSharedKey"]:
"""The Put VirtualNetworkGatewayConnectionSharedKey operation sets the virtual network gateway
connection shared key for passed virtual network gateway connection in the specified resource
group through Network resource provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_connection_name: The virtual network gateway connection name.
:type virtual_network_gateway_connection_name: str
        :param parameters: Parameters supplied to the begin set virtual network gateway connection
         shared key operation through the Network resource provider.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.ConnectionSharedKey
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ConnectionSharedKey or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.ConnectionSharedKey]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionSharedKey"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._set_shared_key_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('ConnectionSharedKey', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_set_shared_key.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/sharedkey"} # type: ignore
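    # Usage sketch (assumption: ``client`` as above; the key value is a
    # placeholder). ``ConnectionSharedKey`` wraps the raw key string:
    #
    #     from azure.mgmt.network.v2016_12_01.models import ConnectionSharedKey
    #
    #     poller = await client.virtual_network_gateway_connections.begin_set_shared_key(
    #         resource_group_name="example-rg",
    #         virtual_network_gateway_connection_name="example-conn",
    #         parameters=ConnectionSharedKey(value="example-shared-key"),
    #     )
    #     shared_key = await poller.result()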
@distributed_trace_async
async def get_shared_key(
self,
resource_group_name: str,
virtual_network_gateway_connection_name: str,
**kwargs: Any
) -> "_models.ConnectionSharedKey":
"""The Get VirtualNetworkGatewayConnectionSharedKey operation retrieves information about the
specified virtual network gateway connection shared key through Network resource provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
        :param virtual_network_gateway_connection_name: The name of the virtual network gateway
         connection whose shared key is retrieved.
:type virtual_network_gateway_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ConnectionSharedKey, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.ConnectionSharedKey
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionSharedKey"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_virtual_network_gateway_connections_get_shared_key_request(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get_shared_key.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ConnectionSharedKey', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_shared_key.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/sharedkey"} # type: ignore
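    # Usage sketch (assumption: ``client`` as above):
    #
    #     shared_key = await client.virtual_network_gateway_connections.get_shared_key(
    #         resource_group_name="example-rg",
    #         virtual_network_gateway_connection_name="example-conn",
    #     )
    #     print(shared_key.value)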
@distributed_trace
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.VirtualNetworkGatewayConnectionListResult"]:
"""The List VirtualNetworkGatewayConnections operation retrieves all the virtual network gateways
connections created.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkGatewayConnectionListResult or the
result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.VirtualNetworkGatewayConnectionListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGatewayConnectionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_virtual_network_gateway_connections_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_virtual_network_gateway_connections_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("VirtualNetworkGatewayConnectionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections"} # type: ignore
async def _reset_shared_key_initial(
self,
resource_group_name: str,
virtual_network_gateway_connection_name: str,
parameters: "_models.ConnectionResetSharedKey",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> Optional["_models.ConnectionResetSharedKey"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ConnectionResetSharedKey"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'ConnectionResetSharedKey')
request = build_virtual_network_gateway_connections_reset_shared_key_request_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._reset_shared_key_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ConnectionResetSharedKey', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reset_shared_key_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/sharedkey/reset"} # type: ignore
@distributed_trace_async
async def begin_reset_shared_key(
self,
resource_group_name: str,
virtual_network_gateway_connection_name: str,
parameters: "_models.ConnectionResetSharedKey",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.ConnectionResetSharedKey"]:
"""The VirtualNetworkGatewayConnectionResetSharedKey operation resets the virtual network gateway
connection shared key for passed virtual network gateway connection in the specified resource
group through Network resource provider.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
        :param virtual_network_gateway_connection_name: The name of the virtual network gateway
         connection whose shared key is reset.
:type virtual_network_gateway_connection_name: str
        :param parameters: Parameters supplied to the begin reset virtual network gateway connection
         shared key operation through the Network resource provider.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.ConnectionResetSharedKey
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ConnectionResetSharedKey or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.ConnectionResetSharedKey]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ConnectionResetSharedKey"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._reset_shared_key_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_connection_name=virtual_network_gateway_connection_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('ConnectionResetSharedKey', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset_shared_key.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/sharedkey/reset"} # type: ignore
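    # Usage sketch (assumption: ``client`` as above). ``key_length`` is the
    # length in bytes of the new key the service should generate:
    #
    #     from azure.mgmt.network.v2016_12_01.models import ConnectionResetSharedKey
    #
    #     poller = await client.virtual_network_gateway_connections.begin_reset_shared_key(
    #         resource_group_name="example-rg",
    #         virtual_network_gateway_connection_name="example-conn",
    #         parameters=ConnectionResetSharedKey(key_length=128),
    #     )
    #     result = await poller.result()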
class LocalNetworkGatewaysOperations:
"""LocalNetworkGatewaysOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _create_or_update_initial(
self,
resource_group_name: str,
local_network_gateway_name: str,
parameters: "_models.LocalNetworkGateway",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> "_models.LocalNetworkGateway":
cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
_json = self._serialize.body(parameters, 'LocalNetworkGateway')
request = build_local_network_gateways_create_or_update_request_initial(
resource_group_name=resource_group_name,
local_network_gateway_name=local_network_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}"} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
local_network_gateway_name: str,
parameters: "_models.LocalNetworkGateway",
*,
content_type: Optional[str] = "application/json",
**kwargs: Any
) -> AsyncLROPoller["_models.LocalNetworkGateway"]:
"""Creates or updates a local network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param local_network_gateway_name: The name of the local network gateway.
:type local_network_gateway_name: str
:param parameters: Parameters supplied to the create or update local network gateway operation.
:type parameters: ~azure.mgmt.network.v2016_12_01.models.LocalNetworkGateway
:keyword content_type: Media type of the body sent to the API. Possible values are:
"application/json" or "text/json". Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either LocalNetworkGateway or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2016_12_01.models.LocalNetworkGateway]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
local_network_gateway_name=local_network_gateway_name,
parameters=parameters,
content_type=content_type,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}"} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
local_network_gateway_name: str,
**kwargs: Any
) -> "_models.LocalNetworkGateway":
"""Gets the specified local network gateway in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param local_network_gateway_name: The name of the local network gateway.
:type local_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: LocalNetworkGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_12_01.models.LocalNetworkGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_local_network_gateways_get_request(
resource_group_name=resource_group_name,
local_network_gateway_name=local_network_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('LocalNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}"} # type: ignore
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
local_network_gateway_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
request = build_local_network_gateways_delete_request_initial(
resource_group_name=resource_group_name,
local_network_gateway_name=local_network_gateway_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}"} # type: ignore
@distributed_trace_async
async def begin_delete( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
local_network_gateway_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified local network gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param local_network_gateway_name: The name of the local network gateway.
:type local_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
local_network_gateway_name=local_network_gateway_name,
api_version=api_version,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways/{localNetworkGatewayName}"} # type: ignore
@distributed_trace
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.LocalNetworkGatewayListResult"]:
"""Gets all the local network gateways in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either LocalNetworkGatewayListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2016_12_01.models.LocalNetworkGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
api_version = kwargs.pop('api_version', "2016-12-01") # type: str
cls = kwargs.pop('cls', None) # type: ClsType["_models.LocalNetworkGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_local_network_gateways_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_local_network_gateways_list_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("LocalNetworkGatewayListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access
request,
stream=False,
**kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/localNetworkGateways"} # type: ignore
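
# ------------------------------------------------------------------------------
# Hedged usage sketch (not part of the generated SDK above): one way a caller
# might drive the AsyncLROPoller-returning operations. The resource group,
# gateway name, and subscription id are hypothetical placeholders.
async def _example_delete_local_network_gateway() -> None:
    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.network.aio import NetworkManagementClient

    async with NetworkManagementClient(DefaultAzureCredential(), "<subscription-id>") as client:
        poller = await client.local_network_gateways.begin_delete("my-rg", "my-lgw")
        await poller.result()  # waits (asynchronously) until the LRO completes
# ------------------------------------------------------------------------------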

# ===== problems/04/4.py | artcz/euler | MIT =====

# coding: utf-8
"""
To run:
python2.7 4.py
Problem:
A palindromic number reads the same both ways. The largest palindrome made
from the product of two 2-digit numbers is 9009 = 91 × 99.
Find the largest palindrome made from the product of two 3-digit numbers.
"""
import time
def oneliner():
"""
three_digit_numbers = range(100, 1000)
products = [
x*y
for x in three_digit_numbers
for y in three_digit_numbers
]
is_palindrome = lambda x: str(x) == str(x)[::-1]
return max(p for p in products if is_palindrome(p))
"""
return max(
p for p
in [x*y for x in range(100, 1000) for y in range(100, 1000)]
if str(p) == str(p)[::-1]
)
def solve1():
def is_palindrome(n):
str_from_int = str(n)
return str_from_int == str_from_int[::-1]
three_digit_numbers = range(100, 1000)
products_of_three_digit_numbers = [
x*y for x in three_digit_numbers
for y in three_digit_numbers
]
palindromes = [
p for p in products_of_three_digit_numbers
if is_palindrome(p)
]
return max(palindromes)
def solve2():
def is_palindrome(n):
original = n
reversed = 0
while n > 0:
reversed = reversed * 10 + n % 10
n //= 10
return original == reversed
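    # Worked example of the digit-reversal check above: for n=123 the loop
    # accumulates 0 -> 3 -> 32 -> 321, so 123 == 321 is False (not a palindrome).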
i, _max, largest_palindrome = 100, 1000, 0
while i < _max:
j = 100
while j < _max:
product = i*j
if is_palindrome(product) and product > largest_palindrome:
largest_palindrome = product
j += 1
i += 1
return largest_palindrome
def solve3():
def is_palindrome(n):
original = n
reversed = 0
while n > 0:
reversed = reversed * 10 + n % 10
n //= 10
return original == reversed
three_digit_numbers = range(100, 1000)
products_of_three_digit_numbers = [
x*y for x in three_digit_numbers
for y in three_digit_numbers
]
palindromes = [
p for p in products_of_three_digit_numbers
if is_palindrome(p)
]
return max(palindromes)
if __name__ == "__main__":
def timeit(function):
t1 = time.time()
output = function()
t2 = time.time()
return output, t2-t1
print timeit(solve1)
print timeit(solve2)
print timeit(solve3)
print timeit(oneliner)

# ===== pystiche_papers/utils/misc.py | jbueltemeier/pystiche_papers | BSD-3-Clause =====

import contextlib
import hashlib
import random
import shutil
import tempfile
from collections import OrderedDict
from collections.abc import Sequence
from os import path
from typing import Any, Callable, Dict, Iterator, Optional
from typing import Sequence as SequenceType
from typing import Tuple, TypeVar, Union, cast, overload
import numpy as np
import torch
from torch import hub, nn
from torch.hub import _get_torch_home
from torch.utils.data.dataloader import DataLoader
from pystiche.image import extract_batch_size, is_single_image, make_batched_image
from pystiche.optim import OptimLogger
__all__ = [
"same_size_padding",
"same_size_output_padding",
"is_valid_padding",
"batch_up_image",
"paper_replication",
"make_reproducible",
"get_tmp_dir",
"get_sha256_hash",
"save_state_dict",
"load_state_dict_from_url",
]
In = TypeVar("In")
Out = TypeVar("Out")
@overload
def elementwise(fn: Callable[[In], Out], inputs: In) -> Out: # type: ignore[misc]
...
@overload
def elementwise(fn: Callable[[In], Out], inputs: SequenceType[In]) -> Tuple[Out, ...]:
...
def elementwise(
fn: Callable[[In], Out], inputs: Union[In, SequenceType[In]]
) -> Union[Out, Tuple[Out, ...]]:
if isinstance(inputs, Sequence):
return tuple(fn(input) for input in inputs)
return fn(inputs)
@overload
def same_size_padding(kernel_size: int) -> int:
...
@overload
def same_size_padding(kernel_size: SequenceType[int]) -> Tuple[int, ...]:
...
def same_size_padding(
kernel_size: Union[int, SequenceType[int]]
) -> Union[int, Tuple[int, ...]]:
return elementwise(lambda x: (x - 1) // 2, kernel_size) # type: ignore[no-any-return]
@overload
def same_size_output_padding(stride: int) -> int:
...
@overload
def same_size_output_padding(stride: SequenceType[int]) -> Tuple[int, ...]:
...
def same_size_output_padding(
stride: Union[int, SequenceType[int]]
) -> Union[int, Tuple[int, ...]]:
return elementwise(lambda x: x - 1, stride) # type: ignore[no-any-return]
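# Worked example (descriptive only): same_size_padding(3) == 1 keeps H and W
# unchanged for a stride-1 conv with a 3x3 kernel ((H + 2*1 - 3)/1 + 1 == H),
# and with that same kernel and padding, same_size_output_padding(2) == 1 is the
# output_padding a stride-2 transposed conv needs to exactly double H and W.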
def is_valid_padding(padding: Union[int, SequenceType[int]]) -> bool:
def is_valid(x: int) -> bool:
return x > 0
if isinstance(padding, int):
return is_valid(padding)
else:
return all(elementwise(is_valid, padding))
def batch_up_image(
image: torch.Tensor,
desired_batch_size: Optional[int] = None,
loader: Optional[DataLoader] = None,
) -> torch.Tensor:
def extract_batch_size_from_loader(loader: DataLoader) -> int:
batch_size = cast(Optional[int], loader.batch_size)
if batch_size is not None:
return batch_size
try:
batch_size = loader.batch_sampler.batch_size # type: ignore[attr-defined]
assert isinstance(batch_size, int)
return batch_size
        except (AttributeError, AssertionError):
            raise RuntimeError("Could not extract the batch size from the loader.")
    if desired_batch_size is None and loader is None:
        raise RuntimeError("Either desired_batch_size or a loader must be given.")
if desired_batch_size is None:
desired_batch_size = extract_batch_size_from_loader(cast(DataLoader, loader))
if is_single_image(image):
image = make_batched_image(image)
    elif extract_batch_size(image) > 1:
        raise RuntimeError("Only single images can be batched up.")
return image.repeat(desired_batch_size, 1, 1, 1)
@contextlib.contextmanager
def paper_replication(
optim_logger: OptimLogger, title: str, url: str, author: str, year: Union[str, int]
) -> Iterator:
header = "\n".join(
(
"Replication of the paper",
f"'{title}'",
url,
"authored by",
author,
f"in {str(year)}",
)
)
with optim_logger.environment(header):
yield
def make_reproducible(
seed: Optional[Any] = 0, seed_standard_library: bool = True
) -> int:
def maybe_seed_standard_library(seed: int) -> None:
if seed_standard_library:
random.seed(seed)
def seed_numpy(seed: int) -> None:
np.random.seed(seed)
def seed_torch(seed: int) -> None:
torch.manual_seed(seed)
def maybe_set_cudnn() -> None:
if torch.backends.cudnn.is_available():
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
# the numpy random generator only accepts uint32 values
seed = hash(seed) % 2 ** 32
maybe_seed_standard_library(seed)
seed_numpy(seed)
seed_torch(seed)
maybe_set_cudnn()
return seed
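def _reproducibility_demo() -> int:
    # Hedged sketch (hypothetical helper, not part of the original module):
    # make_reproducible accepts any hashable seed, seeds random/numpy/torch,
    # and returns the derived uint32 seed so the run can be reproduced later.
    return make_reproducible("experiment-42")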
@contextlib.contextmanager
def get_tmp_dir(**mkdtemp_kwargs: Any) -> Iterator[str]:
tmp_dir = tempfile.mkdtemp(**mkdtemp_kwargs)
try:
yield tmp_dir
finally:
shutil.rmtree(tmp_dir)
def get_sha256_hash(file: str, chunk_size: int = 4096) -> str:
hasher = hashlib.sha256()
with open(file, "rb") as fh:
for chunk in iter(lambda: fh.read(chunk_size), b""):
hasher.update(chunk)
return hasher.hexdigest()
def save_state_dict(
input: Union[Dict[str, torch.Tensor], nn.Module],
name: str,
root: Optional[str] = None,
to_cpu: bool = True,
hash_len: int = 8,
ext: str = ".pth",
) -> str:
if isinstance(input, nn.Module):
state_dict = input.state_dict()
else:
state_dict = OrderedDict(input)
if to_cpu:
state_dict = OrderedDict(
[(key, tensor.cpu()) for key, tensor in state_dict.items()]
)
if root is None:
root = _get_torch_home()
with get_tmp_dir() as tmp_dir:
tmp_file = path.join(tmp_dir, "tmp")
torch.save(state_dict, tmp_file)
sha256 = get_sha256_hash(tmp_file)
file = path.join(root, f"{name}-{sha256[:hash_len]}{ext}")
shutil.move(tmp_file, file)
return file
def load_state_dict_from_url(
url: str,
model_dir: Optional[str] = None,
map_location: Optional[Union[torch.device, str]] = None,
file_name: Optional[str] = None,
**kwargs: Any,
) -> Dict[str, torch.Tensor]:
# This is just for compatibility with torch==1.6.0 until
# https://github.com/pytorch/pytorch/issues/42596 is resolved
if model_dir is None:
model_dir = path.join(hub.get_dir(), "checkpoints")
if file_name is None:
file_name = path.basename(url)
try:
return cast(
Dict[str, torch.Tensor],
hub.load_state_dict_from_url(
url, model_dir=model_dir, file_name=file_name, **kwargs
),
)
except RuntimeError as error:
if str(error) != "Only one file(not dir) is allowed in the zipfile":
raise error
cached_file = path.join(model_dir, file_name)
return cast(
Dict[str, torch.Tensor], torch.load(cached_file, map_location=map_location)
)

# ===== src/Project 2.py | ArsenDarbinyan/DS_Course | MIT =====

import matplotlib.ticker as ticker
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
import googletrans as gt
import csv
import os
def cleaning():
    # Strip '.' thousands separators from column 3. The original opened the same
    # file for reading and 'r+' writing simultaneously, which corrupts it; writing
    # to a temp file and swapping it in is safe. Note: this helper is never called
    # below, and the file it cleans ('Data-Project-2.csv') differs from the one
    # read into pandas ('Data Project 2.csv').
    with open('Data-Project-2.csv', 'r', newline='') as src, \
            open('Data-Project-2.csv.tmp', 'w', newline='') as dst:
        writer = csv.writer(dst)
        for row in csv.reader(src):
            row[3] = row[3].replace('.', '')
            writer.writerow(row)
    os.replace('Data-Project-2.csv.tmp', 'Data-Project-2.csv')
df = pd.read_csv('Data Project 2.csv')
df['number'].replace(0, np.nan, inplace=True)
df.dropna(subset=['number'], inplace=True)
fig = plt.figure()
ax1 = fig.add_subplot(1, 2, 1)
ax2 = fig.add_subplot(1, 2, 2)
ax1.barh(df['year'], width=df['number'])
ax2.barh(df['month'], width=df['number'])
ax1.xaxis.set_major_locator(ticker.MultipleLocator(5000))
ax1.yaxis.set_major_locator(ticker.MultipleLocator(1))
ax1.xaxis.set_minor_locator(ticker.MultipleLocator(2500))
ax2.xaxis.set_major_locator(ticker.MultipleLocator(5000))
ax2.yaxis.set_major_locator(ticker.MultipleLocator(1))
ax2.xaxis.set_minor_locator(ticker.MultipleLocator(2500))
df_month = pd.DataFrame(df[['month', 'number']])
numbers_sum = df_month.groupby(['month']).sum()
translator = gt.Translator(service_urls=['translate.google.com', 'translate.google.co.kr'])
months = numbers_sum.index.tolist()
translate = translator.translate(months, src='pt')
translate1 = []
for i in translate:
translate1.append(i.text)
x = pd.DataFrame(numbers_sum)
df_trans_month = pd.DataFrame(x.values, index=translate1, columns=['number'])
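# df_trans_month re-indexes the per-month sums of the 'number' column by the
# English month names produced by googletrans (source labels are Portuguese).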
fig = plt.figure()
ax3 = fig.add_subplot(1, 1, 1)
ax3.barh(df_trans_month.index, width=df_trans_month['number'])
plt.show()

# ===== chapter1-nlp-essentials/SMS_Spam_Detection.py | c-w-m/anlp-tf2 | MIT =====

# Based on SMS_Spam_Detection
# edited to run on local PC without GPU setup
import io
import re
import stanza
import pandas as pd
import tensorflow as tf
import stopwordsiso as stopwords
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.preprocessing import LabelEncoder
from keras.utils import np_utils
from gensim.models.word2vec import Word2Vec
import gensim.downloader as api
print("TensorFlow Version: " + tf.__version__)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\n"+(80*"~")+"\nDownload Data\n - do this from notebook code")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nTest data reading:")
lines = io.open('data/SMSSpamCollection').read().strip().split('\n')
print(lines[0])
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\n"+(80*"~")+"\nPre-Process Data")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
spam_dataset = []
count = 0
for line in lines:
label, text = line.split('\t')
if label.lower().strip() == 'spam':
spam_dataset.append((1, text.strip()))
count += 1
else:
        spam_dataset.append((0, text.strip()))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nprint(spam_dataset[0])")
print(spam_dataset[0])
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+'\n\nprint("Spam: ", count)')
print("Spam: ", count)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\n"+(80*"~")+"\nData Normalization")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
df = pd.DataFrame(spam_dataset, columns=['Spam', 'Message'])
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Normalization functions
def message_length(x):
# returns total number of characters
return len(x)
def num_capitals(x):
    _, count = re.subn(r'[A-Z]', '', x)  # counts ASCII capitals, so English-only
return count
def num_punctuation(x):
_, count = re.subn(r'\W', '', x)
return count
df['Capitals'] = df['Message'].apply(num_capitals)
df['Punctuation'] = df['Message'].apply(num_punctuation)
df['Length'] = df['Message'].apply(message_length)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nCorpus:")
print(df.describe())
train = df.sample(frac=0.8,random_state=42) #random state is a seed value
test = df.drop(train.index)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nTrain:")
print(train.describe())
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nTest:")
print(test.describe())
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\n"+(80*"~")+"\nModel Building")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Basic 1-layer neural network model for evaluation
def make_model(input_dims=3, num_units=12):
model = tf.keras.Sequential()
# Adds a densely-connected layer with 12 units to the model:
model.add(tf.keras.layers.Dense(num_units,
input_dim=input_dims,
activation='relu'))
# Add a sigmoid layer with a binary output unit:
model.add(tf.keras.layers.Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam',
metrics=['accuracy'])
return model
x_train = train[['Length', 'Punctuation', 'Capitals']]
y_train = train[['Spam']]
x_test = test[['Length', 'Punctuation', 'Capitals']]
y_test = test[['Spam']]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nx_train:")
print(x_train)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80 * "~") + "\n\nmodel = make_model():")
model = make_model()
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nmodel.fit(x_train, y_train, epochs=10, batch_size=10)")
model.fit(x_train, y_train, epochs=10, batch_size=10)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nmodel.evaluation(x_test, y_test)")
model.evaluate(x_test, y_test)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\ny_train_pred = model.predict_classes(x_train)")
y_train_pred = model.predict_classes(x_train)
#print((80*"~")+"\n\ny_train_pred = np.argmax(model.predict(x_train), axis=-1)")
#y_train_pred: object = np.argmax(model.predict(x_train), axis=-1)
# confusion matrix
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\ntf.math.confusion_matrix(tf.constant(y_train.Spam), y_train_pred)")
print(tf.math.confusion_matrix(tf.constant(y_train.Spam), y_train_pred))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nsum(y_train_pred)")
print(sum(y_train_pred))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\ny_test_pred = model.predict_classes(x_test)")
y_test_pred = model.predict_classes(x_test)
#print((80*"~")+"\n\ny_train_pred = np.argmax(model.predict(x_test), axis=-1)")
#y_test_pred = np.argmax(model.predict(x_test), axis=-1)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\ntf.math.confusion_matrix(tf.constant(y_test.Spam), y_test_pred)")
print(tf.math.confusion_matrix(tf.constant(y_test.Spam), y_test_pred))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\n"+(80*"~")+"\nTokenization and Stop Word Removal"+(80*"~"))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
sentence = 'Go until jurong point, crazy.. Available only in bugis n great world'
sentence.split()
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nen = stanza.download('en')")
en = stanza.download('en')
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80 * "~") + "\n\nen = stanza.Pipeline(lang='en')")
en = stanza.Pipeline(lang='en')
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nprint(sentence)")
print(sentence)
tokenized = en(sentence)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nprint(len(tokenized.sentences))")
print(len(tokenized.sentences))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nprint(<End of Sentence>)")
for snt in tokenized.sentences:
for word in snt.tokens:
print(word.text)
print("<End of Sentence>")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\n"+(80*"~")+"\nDependency Parsing Example\n"+(80*"~"))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nen2 = stanza.Pipeline(lang='en')")
en2 = stanza.Pipeline(lang='en')
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nprint(<End of Sentence>)")
pr2 = en2("Hari went to school")
for snt in pr2.sentences:
for word in snt.tokens:
print(word)
print("<End of Sentence>")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\n"+(80*"~")+"\nJapanese Tokenization Example"+(80*"~"))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\njp = stanza.download('ja')")
jp = stanza.download('ja')
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\njp = stanza.Pipeline(lang='ja')")
jp = stanza.Pipeline(lang='ja')
jp_line = jp("選挙管理委員会")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nsnt.tokens")
for snt in jp_line.sentences:
for word in snt.tokens:
print(word.text)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\n"+(80*"~")+"\nAdding Word Count Feature"+(80*"~"))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def word_counts(x, pipeline=en):
doc = pipeline(x)
count = sum( [ len(sentence.tokens) for sentence in doc.sentences] )
return count
#en = snlp.Pipeline(lang='en', processors='tokenize')
df['Words'] = df['Message'].apply(word_counts)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nCorpus: (Words added)")
print(df.describe())
#train=df.sample(frac=0.8,random_state=42) #random state is a seed value
#test=df.drop(train.index)
train['Words'] = train['Message'].apply(word_counts)
test['Words'] = test['Message'].apply(word_counts)
x_train = train[['Length', 'Punctuation', 'Capitals', 'Words']]
y_train = train[['Spam']]
x_test = test[['Length', 'Punctuation', 'Capitals' , 'Words']]
y_test = test[['Spam']]
model = make_model(input_dims=4)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nmodel.fit(x_train, y_train, epochs=10, batch_size=10)")
model.fit(x_train, y_train, epochs=10, batch_size=10)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nmodel.evaluate(x_test, y_test)")
model.evaluate(x_test, y_test)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\n"+(80*"~")+"\nStop Word Removal")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(stopwords.langs())")
print(stopwords.langs())
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(sorted(stopwords.stopwords('en')))")
print(sorted(stopwords.stopwords('en')))
en_sw = stopwords.stopwords('en')
def word_counts(x, pipeline=en):
doc = pipeline(x)
count = 0
for sentence in doc.sentences:
for token in sentence.tokens:
if token.text.lower() not in en_sw:
count += 1
return count
train['Words'] = train['Message'].apply(word_counts)
test['Words'] = test['Message'].apply(word_counts)
x_train = train[['Length', 'Punctuation', 'Capitals', 'Words']]
y_train = train[['Spam']]
x_test = test[['Length', 'Punctuation', 'Capitals' , 'Words']]
y_test = test[['Spam']]
model = make_model(input_dims=4)
#model = make_model(input_dims=3)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nmodel.fit(x_train, y_train, epochs=10, batch_size=10)")
model.fit(x_train, y_train, epochs=10, batch_size=10)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\n"+(80*"~")+"\nPOS Based Features"+(80*"~"))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~")+"\n\nen = stanza.Pipeline(lang='en')")
en = stanza.Pipeline(lang='en')
txt = "Yo you around? A friend of mine's lookin."
pos = en(txt)
def print_pos(doc):
text = ""
for sentence in doc.sentences:
for token in sentence.tokens:
text += token.words[0].text + "/" + \
token.words[0].upos + " "
text += "\n"
return text
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(print_pos(pos))")
print(print_pos(pos))
en_sw = stopwords.stopwords('en')
def word_counts_v3(x, pipeline=en):
doc = pipeline(x)
count = 0
for sentence in doc.sentences:
for token in sentence.tokens:
if token.text.lower() not in en_sw and token.words[0].upos not in ['PUNCT', 'SYM']:
count += 1
return count
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(word_counts(txt), word_counts_v3(txt))")
print(word_counts(txt), word_counts_v3(txt))
train['Test'] = 0
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(train.describe())")
print(train.describe())
def word_counts_v3(x, pipeline=en):
doc = pipeline(x)
totals = 0.
count = 0.
non_word = 0.
for sentence in doc.sentences:
totals += len(sentence.tokens) # (1)
for token in sentence.tokens:
if token.text.lower() not in en_sw:
if token.words[0].upos not in ['PUNCT', 'SYM']:
count += 1.
else:
non_word += 1.
non_word = non_word / totals
return pd.Series([count, non_word], index=['Words_NoPunct', 'Punct'])
x = train[:10]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nx.describe()")
print(x.describe())
train_tmp = train['Message'].apply(word_counts_v3)
train = pd.concat([train, train_tmp], axis=1)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\ntrain.describe()")
print(train.describe())
test_tmp = test['Message'].apply(word_counts_v3)
test = pd.concat([test, test_tmp], axis=1)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\ntest.describe()")
print(test.describe())
z = pd.concat([x, train_tmp], axis=1)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(z.describe())")
print(z.describe())
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(z.loc[z['Spam']==0].describe())")
print(z.loc[z['Spam']==0].describe())
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(z.loc[z['Spam']==1].describe())")
print(z.loc[z['Spam']==1].describe())
aa = [word_counts_v3(y) for y in x['Message']]
ab = pd.DataFrame(aa)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(ab.describe())")
print(ab.describe())
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\n" + (80*"~") +"\nLemmatization")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
text = "Stemming is aimed at reducing vocabulary and aid un-derstanding of" +\
" morphological processes. This helps people un-derstand the" +\
" morphology of words and reduce size of corpus."
lemma = en(text)
lemmas = ""
for sentence in lemma.sentences:
for token in sentence.tokens:
lemmas += token.words[0].lemma +"/" + \
token.words[0].upos + " "
lemmas += "\n"
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(lemmas)")
print(lemmas)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\n" + (80*"~") + "\nTF-IDF Based Model\n" + (80*"~"))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
corpus = [
"I like fruits. Fruits like bananas",
"I love bananas but eat an apple",
"An apple a day keeps the doctor away"
]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\n" + (80*"~") +"\nCount Vectorization")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
vectorizer = CountVectorizer()
X = vectorizer.fit_transform(corpus)
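# X is a sparse document-term matrix: one row per corpus sentence and one column
# per vocabulary token, with raw counts as entries (e.g. "bananas" shows up in
# rows 0 and 1).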
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(vectorizer.get_feature_names())")
print(vectorizer.get_feature_names())
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(X.toarray())")
print(X.toarray())
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(cosine_similarity(X.toarray()))")
print(cosine_similarity(X.toarray()))
query = vectorizer.transform(["apple and bananas"])
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(cosine_similarity(X, query))")
print(cosine_similarity(X, query))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\n" + (80*"~") +"\nTF-IDF Vectorization")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
transformer = TfidfTransformer(smooth_idf=False)
tfidf = transformer.fit_transform(X.toarray())
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(tfidf.toarray())")
print(pd.DataFrame(tfidf.toarray(), columns=vectorizer.get_feature_names()))
tfidf = TfidfVectorizer(binary=True)
X = tfidf.fit_transform(train['Message']).astype('float32')
X_test = tfidf.transform(test['Message']).astype('float32')
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(X.shape)")
print(X.shape)
_, cols = X.shape
model2 = make_model(cols) # to match tf-idf dimensions
lb = LabelEncoder()
y = lb.fit_transform(y_train)
dummy_y_train = np_utils.to_categorical(y)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nmodel2.fit(X.toarray(), y_train, epochs=10, batch_size=10)")
model2.fit(X.toarray(), y_train, epochs=10, batch_size=10)
model2.evaluate(X_test.toarray(), y_test)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(train.loc[train.Spam == 1].describe())")
print(train.loc[train.Spam == 1].describe())
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\n" + (80*"~") +"\nWord Vectors")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(api.info())")
print(api.info())
model_w2v = api.load("word2vec-google-news-300")
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(model_w2v.most_similar('cookies',topn=10))")
print(model_w2v.most_similar("cookies",topn=10))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(model_w2v.doesnt_match(['USA','Canada','India','Tokyo']))")
print(model_w2v.doesnt_match(["USA","Canada","India","Tokyo"]))
king = model_w2v['king']
man = model_w2v['man']
woman = model_w2v['woman']
queen = king - man + woman
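# Vector-arithmetic sketch: king - man approximates a "royalty minus maleness"
# offset, so adding woman should land near the embedding for "queen" (checked
# with similar_by_vector below).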
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print((80*"~") + "\n\nprint(model_w2v.similar_by_vector(queen))")
print(model_w2v.similar_by_vector(queen))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print("\n\n" + (80*"~") +"\n-- end of 'SMS_Spam_Detection.py' --\n" + (
80*"~"))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

# ===== esbmc_wr/log/log.py | thalestas/esbmc-wr | Apache-2.0 =====

import os
import sys
import logging
from datetime import datetime
DIRECTORY = "output"
LOG_FILE = "output.log"
def create_dir(name):
try:
os.mkdir(name)
except FileExistsError:
print("Directory ", name, " already exists.")
def configure(verbose):
create_dir(DIRECTORY)
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
if verbose:
stdout_handler = logging.StreamHandler(sys.stdout)
logger.addHandler(stdout_handler)
date = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
log_name = "esbmc-{}.log".format(date)
file_handler = logging.FileHandler("output/"+log_name)
logger.addHandler(file_handler)
return log_name
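# Hedged usage sketch: configure() wires up the root logger and returns the
# timestamped file name it created under output/, e.g.
#
#   log_name = configure(verbose=True)  # also mirrors records to stdout
#   info("logging to " + log_name)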
def info(msg):
logging.info(msg)
def error(msg):
    logging.error(msg)
def header(c_file, esbmc_args, item):
logging.info("########################################")
logging.info("[FILE] %s", c_file)
logging.info("[ARGS] %s", esbmc_args)
logging.info("[FUNCTION] %s", item)
logging.info("########################################\n")
def header_retest(c_file, esbmc_args, item):
logging.info("")
logging.info("########################################")
logging.info("*****RETEST*****")
logging.info("[FILE] %s", c_file)
logging.info("[ARGS] %s", esbmc_args)
logging.info("[FUNCTION] %s", item)
logging.info("########################################\n")
def finish_time(c_file, elapsed):
logging.info("########################################")
logging.info("[FILE]: %s [TIME]: %s", c_file, elapsed)
logging.info("########################################\n")
def overall_time(elapsed_all):
logging.info("[OVERALL TIME]: %s", elapsed_all)
def summary(n_files, n_func, n_cex, time, memory):
print("\n########################################")
print("Summary:\n")
print("Files Verified: ", n_files)
print("Functions Verified: ", n_func)
print("Counterexamples: ", n_cex)
print("")
print(f"Overall time: {round(time,2)}s")
print(f"Peak Memory Usage: {round((memory / 10**6),2)}MB")
print("########################################\n")

# ===== pj_23.py | luisalvaradoar/pj_euler | Apache-2.0 =====

from math import sqrt
from itertools import combinations
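# Project Euler 23: every integer greater than 28123 can be written as the sum
# of two abundant numbers, so it suffices to enumerate abundant numbers up to
# that bound, form all pairwise sums (plus doubles), and add up the integers
# that are never produced.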
def sum_divisores(n):
    # Sum of the proper divisors of n, collecting divisors in (i, n//i) pairs
    # up to sqrt(n). (The original had a dead `n % i == i` branch: n % i is
    # always < i, so it could never fire; set() already de-duplicates squares.)
    lista = []
    for i in range(1, int(sqrt(n)) + 1):
        if n % i == 0:
            lista.append(i)
            lista.append(n // i)
    lista.remove(n)
    return sum(set(lista))
abundant = []
for i in range(1, 28123 + 1):
if sum_divisores(i) > i:
abundant.append(i)
combinaciones = list(combinations(abundant,2))
sumas = []
for i in abundant:
suma = 2*i
if suma < 28123:
sumas.append(suma)
for i in combinaciones:
suma = sum(i)
if suma < 28123:
sumas.append(suma)
sumas = list(set(sumas))
S = 0
for i in range(1,28123):
if i not in sumas:
S += i
print(S)
#4179871
#[Finished in 51.1s]

# ===== cogs/config.py | Xeift/Kizmeow-NFT-Mint-Progress-Discord-Bot | MIT =====

import discord
from discord_slash import cog_ext
from core.cog_core import cogcore
class config_check(cogcore):
@cog_ext.cog_slash(
name="config_check",
description="[admin] check the information you entered")
    async def config(self, ctx):
        role = discord.utils.get(ctx.guild.roles, name="Bot Admin")
        if role in ctx.author.roles:
            await ctx.send(file=discord.File("config.json"))
        else:
            embed = discord.Embed(title="[failed]\nreason: user does not have the `Bot Admin` role", color=0xe8006f)
            await ctx.send(embed=embed)
def setup(bot):
bot.add_cog(config_check(bot))

# ===== mnist_softmax.py | cnishina/tf-mnist | MIT =====

# Follows the tutorial https://www.tensorflow.org/get_started/mnist/beginners
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
class MnistSoftmax:
def __init__(self):
self.accuracy = None
def find_accuracy(self):
# Download input from tensorflow
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
# ==============================================================================================
# Implementing the regression
#
        # The softmax regression has two steps: add up evidence, then convert it into probabilities.
# The tally of evidence of all the pictures will result in positive and negative weights.
#
# See:
# https://www.tensorflow.org/images/softmax-weights.png
# https://www.tensorflow.org/images/softmax-regression-scalargraph.png
# https://www.tensorflow.org/images/softmax-regression-scalarequation.png
# https://www.tensorflow.org/images/softmax-regression-vectorequation.png
#
# Set up variables for softmax, 784 images with 10 labels (0 through 9)
x = tf.placeholder(tf.float32, [None, 784]) # inputs
W = tf.Variable(tf.zeros([784, 10])) # weights of evidence
b = tf.Variable(tf.zeros([10])) # bias, additional evidence
        # x * W instead of W * x: x holds a whole batch of inputs ([None, 784]),
        # so tf.matmul(x, W) yields one 10-way output row per example.
y = tf.nn.softmax(tf.matmul(x, W) + b)
# ==============================================================================================
# Training
#
# Determining the loss (or define what it means for a model to be bad), we determine the
# "cross-entropy".
#
# Cross entropy:
# The raw formulation of cross-entropy,
#
# tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(tf.nn.softmax(y)),
# reduction_indices=[1]))
#
        # can be numerically unstable. tf.nn.softmax_cross_entropy_with_logits on
        # the raw logits is the stable alternative; this script keeps the raw
        # formulation below for clarity (see the commented sketch after it).
y_ = tf.placeholder(tf.float32, [None, 10])
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
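        # Hedged sketch of the numerically stabler variant (would replace the
        # softmax in `y` and the manual cross-entropy above; not executed here):
        #   logits = tf.matmul(x, W) + b
        #   cross_entropy = tf.reduce_mean(
        #       tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=logits))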
sess = tf.InteractiveSession()
tf.global_variables_initializer().run()
# feed in data 100 at a time for a 1000 repetitions
for _ in range(1000):
batch_xs, batch_ys = mnist.train.next_batch(100)
sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
# ==============================================================================================
# Evaluate the model
#
# Gives you the index of the highest entry in a tensor along some axis. The accuracy result
# should be around 92%.
correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(y_,1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
        self.accuracy = sess.run(accuracy, feed_dict={x: mnist.test.images, y_: mnist.test.labels})
        print(self.accuracy)  # runs the accuracy op once instead of twice
        return self.accuracy

# ===== qmpy/data/__init__.py | JosephMontoya-TRI/qmpy | MIT =====

import os.path

import yaml

location = os.path.dirname(__file__)

with open(os.path.join(location, 'elements/groups.yml')) as fp:
    element_groups = yaml.load(fp, Loader=yaml.FullLoader)

with open(os.path.join(location, 'elements/data.yml')) as fp:
    elements = yaml.load(fp, Loader=yaml.FullLoader)

with open(os.path.join(location, 'elements/chemical_potentials.yml')) as fp:
    chem_pots = yaml.load(fp, Loader=yaml.FullLoader)

def save_chem_pots(chem_pots):
    # Write back via a context manager so the handle is always closed.
    with open(os.path.join(location, 'elements/chemical_potentials.yml'), 'w') as fp:
        fp.write(yaml.dump(chem_pots, default_flow_style=False))

# ===== faster-rcnn-resnet50/model/faster_rcnn_resnet50.py | fengkaibit/faster-rcnn_resnet50 | MIT =====

from __future__ import absolute_import
import torch
from torchvision.models import resnet50
from model.region_proposal_network import RegionProposalNetwork
from model.faster_rcnn import FasterRCNN
from model.roi_module import RoIPooling2D
from utils.config import opt
from utils import array_tool
from roi_align.functions.roi_align import RoIAlignFunction
def set_bn_fix(m):  # freeze the BatchNorm parameters
classname = m.__class__.__name__
if classname.find('BatchNorm') != -1:
for p in m.parameters(): p.requires_grad = False
def decom_resnet50():
model = resnet50(not opt.load_path)
features_base = torch.nn.Sequential(
model.conv1, model.bn1, model.relu, model.maxpool,
model.layer1, model.layer2, model.layer3)
features_top = torch.nn.Sequential(
model.layer4, model.avgpool)
classifier = torch.nn.Sequential(
model.fc)
    for layer in features_base[:5]:  # freeze conv1 and conv2_x
for p in layer.parameters():
p.requires_grad = False
features_base.apply(set_bn_fix)
features_top.apply(set_bn_fix)
return features_base, features_top, classifier
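# Note on the split above: conv1 through layer3 give an overall stride of 16
# (matching FasterRCNNResNet50.feat_stride) and feed the RPN, while layer4 plus
# avgpool act as the per-RoI head, mirroring the usual Faster R-CNN ResNet recipe.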
class FasterRCNNResNet50(FasterRCNN):
feat_stride = 16
def __init__(self,
n_fg_class= opt.class_num,
ratios=[0.5, 1, 2],
anchor_scales=[8, 16, 32]):
features_base, features_top, classifier = decom_resnet50()
rpn = RegionProposalNetwork(
1024, 1024,
ratios=ratios,
anchor_scales=anchor_scales,
feat_stride=self.feat_stride,
)
head = Resnet50RoIHead(
n_class=n_fg_class + 1,
roi_size=14,
spatial_scale=(1. / self.feat_stride),
features_top=features_top,
classifier=classifier
)
super(FasterRCNNResNet50, self).__init__(
features_base,
rpn,
head,
)
class Resnet50RoIHead(torch.nn.Module):
def __init__(self, n_class, roi_size, spatial_scale, features_top, classifier):
super(Resnet50RoIHead, self).__init__()
self.features_top = features_top
self.classifier = classifier
self.cls_loc = torch.nn.Linear(1000, n_class * 4)
self.score = torch.nn.Linear(1000, n_class)
normal_init(self.cls_loc, 0, 0.01)
normal_init(self.score, 0, 0.01)
self.n_class = n_class
self.roi_size = roi_size
self.spatial_scale = spatial_scale
#self.roi = RoIPooling2D(self.roi_size, self.roi_size, self.spatial_scale)
self.roi_align = RoIAlignFunction(self.roi_size, self.roi_size, self.spatial_scale)
def forward(self, x, rois, roi_indices):
roi_indices = array_tool.totensor(roi_indices).float()
rois = array_tool.totensor(rois).float()
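# rois arrive as (y_min, x_min, y_max, x_max); RoIAlign expects (batch_idx, x_min, y_min, x_max, y_max), hence the reorder below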
indices_and_rois = torch.cat([roi_indices[:, None], rois], dim=1)
xy_indices_and_rois = indices_and_rois[:, [0, 2, 1, 4, 3]] # yx->xy
indices_and_rois = xy_indices_and_rois.contiguous()  # make the tensor contiguous in memory
#pool = self.roi(x, indices_and_rois)  # pass the conv4_x feature map through RoI pooling
pool = self.roi_align(x, indices_and_rois)  # pass the conv4_x feature map through RoI align
conv5_out = self.features_top(pool)  # run the pooled features through conv5_x
fc_in = conv5_out.view(conv5_out.size(0), -1)
fc = self.classifier(fc_in)
roi_cls_locs = self.cls_loc(fc)  # (1000->84) per-class bounding-box regression
roi_scores = self.score(fc)  # (1000->21) per-class score prediction
return roi_cls_locs, roi_scores
def normal_init(m, mean, stddev, truncated=False):
if truncated:
m.weight.data.normal_().fmod_(2).mul_(stddev).add_(mean)  # truncated normal initialization
else:
m.weight.data.normal_(mean, stddev)  # plain normal initialization
m.bias.data.zero_()
| 36.466019
| 91
| 0.660011
|
83c3b16002beb14208c18622a7af6c420ebea8ff
| 9,202
|
py
|
Python
|
reinforcement/tensorflow/minigo/selfplay_worker.py
|
mwnewlin/afit_mlperf_training
|
abdf362abe6a14a85e13b8b18afe7d40cc9e5430
|
[
"Apache-2.0"
] | null | null | null |
reinforcement/tensorflow/minigo/selfplay_worker.py
|
mwnewlin/afit_mlperf_training
|
abdf362abe6a14a85e13b8b18afe7d40cc9e5430
|
[
"Apache-2.0"
] | 12
|
2019-03-25T17:38:16.000Z
|
2022-03-11T23:43:25.000Z
|
reinforcement/tensorflow/minigo/selfplay_worker.py
|
mwnewlin/afit_mlperf_training
|
abdf362abe6a14a85e13b8b18afe7d40cc9e5430
|
[
"Apache-2.0"
] | 1
|
2019-03-11T15:59:25.000Z
|
2019-03-11T15:59:25.000Z
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wrapper scripts to ensure that main.py commands are called correctly."""
import argh
import argparse
import cloud_logging
import logging
import os
import main
import shipname
import sys
import time
import shutil
import dual_net
import preprocessing
import numpy
import random
from utils import timer
from tensorflow import gfile
import tensorflow as tf
import goparams
import qmeas
import multiprocessing
# The base directory now comes from the command line; the old env-var plumbing is kept below for reference.
#BUCKET_NAME = os.environ['BUCKET_NAME']
#BASE_DIR = "gs://{}".format(BUCKET_NAME)
#BASE_DIR = goparams.BASE_DIR
BASE_DIR = sys.argv[1]
MODELS_DIR = os.path.join(BASE_DIR, 'models')
SELFPLAY_DIR = os.path.join(BASE_DIR, 'data/selfplay')
HOLDOUT_DIR = os.path.join(BASE_DIR, 'data/holdout')
SGF_DIR = os.path.join(BASE_DIR, 'sgf')
TRAINING_CHUNK_DIR = os.path.join(BASE_DIR, 'data', 'training_chunks')
ESTIMATOR_WORKING_DIR = os.path.join(BASE_DIR, 'estimator_working_dir')
# What percent of games to holdout from training per generation
HOLDOUT_PCT = goparams.HOLDOUT_PCT
def print_flags():
flags = {
# 'BUCKET_NAME': BUCKET_NAME,
'BASE_DIR': BASE_DIR,
'MODELS_DIR': MODELS_DIR,
'SELFPLAY_DIR': SELFPLAY_DIR,
'HOLDOUT_DIR': HOLDOUT_DIR,
'SGF_DIR': SGF_DIR,
'TRAINING_CHUNK_DIR': TRAINING_CHUNK_DIR,
'ESTIMATOR_WORKING_DIR': ESTIMATOR_WORKING_DIR,
}
print("Computed variables are:")
print('\n'.join('--{}={}'.format(flag, value)
for flag, value in flags.items()))
def get_models():
"""Finds all models, returning a list of model number and names
sorted increasing.
Returns: [(13, 000013-modelname), (17, 000017-modelname), ...etc]
"""
all_models = gfile.Glob(os.path.join(MODELS_DIR, '*.meta'))
model_filenames = [os.path.basename(m) for m in all_models]
model_numbers_names = sorted([
(shipname.detect_model_num(m), shipname.detect_model_name(m))
for m in model_filenames])
return model_numbers_names
def get_latest_model():
"""Finds the latest model, returning its model number and name
Returns: (17, 000017-modelname)
"""
models = get_models()
if len(models) == 0:
models = [(0, '000000-bootstrap')]
return models[-1]
def get_model(model_num):
models = {k: v for k, v in get_models()}
if not model_num in models:
raise ValueError("Model {} not found!".format(model_num))
return models[model_num]
def game_counts(n_back=20):
"""Prints statistics for the most recent n_back models"""
all_models = gfile.Glob(os.path.join(MODELS_DIR, '*.meta'))
model_filenames = sorted([os.path.basename(m).split('.')[0]
for m in all_models], reverse=True)
for m in model_filenames[:n_back]:
games = gfile.Glob(os.path.join(SELFPLAY_DIR, m, '*.zz'))
print(m, len(games))
def bootstrap():
bootstrap_name = shipname.generate(0)
bootstrap_model_path = os.path.join(MODELS_DIR, bootstrap_name)
print("Bootstrapping with working dir {}\n Model 0 exported to {}".format(
ESTIMATOR_WORKING_DIR, bootstrap_model_path))
main.bootstrap(ESTIMATOR_WORKING_DIR, bootstrap_model_path)
def selfplay(model_name, readouts=goparams.SP_READOUTS, verbose=1, resign_threshold=0.95):
print("Playing a game with model {}".format(model_name))
model_save_path = os.path.join(MODELS_DIR, model_name)
game_output_dir = os.path.join(SELFPLAY_DIR, model_name)
game_holdout_dir = os.path.join(HOLDOUT_DIR, model_name)
sgf_dir = os.path.join(SGF_DIR, model_name)
main.selfplay(
load_file=model_save_path,
output_dir=game_output_dir,
holdout_dir=game_holdout_dir,
output_sgf=sgf_dir,
readouts=readouts,
holdout_pct=HOLDOUT_PCT,
resign_threshold=resign_threshold,
verbose=verbose,
)
def selfplay_cache_model(network, model_name, readouts=goparams.SP_READOUTS, verbose=1, resign_threshold=0.95):
print("Playing a game with model {}".format(model_name))
game_output_dir = os.path.join(SELFPLAY_DIR, model_name)
game_holdout_dir = os.path.join(HOLDOUT_DIR, model_name)
sgf_dir = os.path.join(SGF_DIR, model_name)
main.selfplay_cache_model(
network=network,
output_dir=game_output_dir,
holdout_dir=game_holdout_dir,
output_sgf=sgf_dir,
readouts=readouts,
holdout_pct=HOLDOUT_PCT,
resign_threshold=resign_threshold,
verbose=verbose,
)
def gather():
print("Gathering game output...")
main.gather(input_directory=SELFPLAY_DIR,
output_directory=TRAINING_CHUNK_DIR)
def train():
model_num, model_name = get_latest_model()
print("Training on gathered game data, initializing from {}".format(model_name))
new_model_name = shipname.generate(model_num + 1)
print("New model will be {}".format(new_model_name))
load_file = os.path.join(MODELS_DIR, model_name)
save_file = os.path.join(MODELS_DIR, new_model_name)
# try:
main.train(ESTIMATOR_WORKING_DIR, TRAINING_CHUNK_DIR, save_file,
generation_num=model_num + 1)
# except:
# print("Got an error training, muddling on...")
# logging.exception("Train error")
def validate(model_num=None, validate_name=None):
""" Runs validate on the directories up to the most recent model, or up to
(but not including) the model specified by `model_num`
"""
if model_num is None:
model_num, model_name = get_latest_model()
else:
model_num = int(model_num)
model_name = get_model(model_num)
# Model N was trained on games up through model N-2, so the validation set
# should only be for models through N-2 as well, thus the (model_num - 1)
# term.
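# e.g. if model_num is 17, only holdout dirs for models 0..15 are kept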
models = list(
filter(lambda num_name: num_name[0] < (model_num - 1), get_models()))
# Run on the most recent 50 generations,
# TODO(brianklee): make this hyperparameter dependency explicit/not hardcoded
holdout_dirs = [os.path.join(HOLDOUT_DIR, pair[1])
for pair in models[-50:]]
main.validate(ESTIMATOR_WORKING_DIR, *holdout_dirs,
checkpoint_name=os.path.join(MODELS_DIR, model_name),
validate_name=validate_name)
def echo():
pass # Flags are echo'd in the ifmain block below.
def selfplay_hook(args):
selfplay(**args)
def selfplay_load_model(model_name):
load_file = os.path.join(MODELS_DIR, model_name)
network = dual_net.DualNetwork(load_file)
return network
def rl_loop():
"""Run the reinforcement learning loop
This tries to create a realistic way to run the reinforcement learning with
all default parameters.
"""
if goparams.DUMMY_MODEL:
# monkeypatch the hyperparams so that we get a quickly executing network.
dual_net.get_default_hyperparams = lambda **kwargs: {
'k': 8, 'fc_width': 16, 'num_shared_layers': 1, 'l2_strength': 1e-4, 'momentum': 0.9}
dual_net.TRAIN_BATCH_SIZE = 16
dual_net.EXAMPLES_PER_GENERATION = 64
# monkeypatch the shuffle buffer size so we don't spin forever shuffling up positions.
preprocessing.SHUFFLE_BUFFER_SIZE = 1000
_, model_name = get_latest_model()
network = selfplay_load_model(model_name)
def count_games():
# returns number of games in the selfplay directory
if not os.path.exists(os.path.join(SELFPLAY_DIR, model_name)):
# directory not existing implies no games have been played yet
return 0
return len(gfile.Glob(os.path.join(SELFPLAY_DIR, model_name, '*.zz')))
while count_games() < goparams.MAX_GAMES_PER_GENERATION:
selfplay_cache_model(network, model_name)
print('Stopping selfplay after finding {} games played.'.format(count_games()))
if __name__ == '__main__':
# tf.logging.set_verbosity(tf.logging.INFO)
seed = int(sys.argv[2])
print('Self play worker: setting random seed = ', seed)
random.seed(seed)
tf.set_random_seed(seed)
numpy.random.seed(seed)
# get TF logger
log = logging.getLogger('tensorflow')
log.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# create file handler which logs even debug messages
fh = logging.FileHandler('tensorflow.log')
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
log.addHandler(fh)
rl_loop()
| 32.982079
| 111
| 0.694632
|
367006a0d92b33733f3798d4757a182275e06f25
| 19,259
|
py
|
Python
|
local/usap/create_cruise.py
|
schmidtocean/openrvdas
|
c83505a41c71d56d8e8edfb4bc3d077700e88b91
|
[
"BSD-2-Clause"
] | null | null | null |
local/usap/create_cruise.py
|
schmidtocean/openrvdas
|
c83505a41c71d56d8e8edfb4bc3d077700e88b91
|
[
"BSD-2-Clause"
] | null | null | null |
local/usap/create_cruise.py
|
schmidtocean/openrvdas
|
c83505a41c71d56d8e8edfb4bc3d077700e88b91
|
[
"BSD-2-Clause"
] | null | null | null |
#! /usr/bin/env python3
"""This script creates a fairly simple "skinny" cruise definition
file from a port_def.yaml specification that, in addition to other
destinations, also writes parsed data to InfluxDB. A typical
invocation would be
local/usap/create_cruise.py \
test/NBP1406/NBP1406_port_defs.yaml > test/NBP1406/NBP1406_cruise.yaml
It creates five modes:
off             - nothing running
no_write        - run all loggers, but don't write raw data to disk
no_write+influx - as no_write, but also write parsed data to InfluxDB
write           - as no_write, but also write raw data to file
write+influx    - as write, but also write parsed data to InfluxDB
All modes (except 'off') write parsed data to the cached data server; the
'+influx' modes also write to InfluxDB.
Two derived data loggers are also included: true_wind and snapshot; these
write to the cached data server (and, in the '+influx' modes, to InfluxDB),
but not to disk.
There is no timeout checking of any loggers.
"""
import argparse
import getpass
import logging
import sys
import time
import yaml
from collections import OrderedDict
HEADER_TEMPLATE = """###########################################################
###########################################################
# YAML cruise definition file for OpenRVDAS.
#
# Created by:
# command: %COMMAND_LINE%
# time: %DATE_TIME% UTC
# user: %USER%
#
########################################
cruise:
id: %CRUISE%
start: '%CRUISE_START%'
end: '%CRUISE_END%'
"""
LOGGER_TEMPLATE = """ ########
%LOGGER%->off:
name: %LOGGER%->off
%LOGGER%->net:
name: %LOGGER%->net
readers: # Read from serial port
- class: SerialReader
kwargs:
baudrate: %BAUD%
port: %TTY%
transforms: # Add timestamp and logger label
- class: TimestampTransform
- class: PrefixTransform
kwargs:
prefix: %LOGGER%
writers:
- class: UDPWriter
kwargs:
port: %RAW_UDP_PORT%
destination: %UDP_DESTINATION%
%LOGGER%->net/file:
name: %LOGGER%->net/file
readers: # Read from serial port
- class: SerialReader
kwargs:
baudrate: %BAUD%
port: %TTY%
transforms: # Add timestamp
- class: TimestampTransform
writers:
- class: LogfileWriter # Write to logfile
kwargs:
filebase: %FILE_ROOT%/%LOGGER%/raw/%CRUISE%_%LOGGER%
- class: ComposedWriter
kwargs:
transforms:
- class: PrefixTransform
kwargs:
prefix: %LOGGER%
writers:
- class: UDPWriter
kwargs:
port: %RAW_UDP_PORT%
destination: %UDP_DESTINATION%
"""
# Read raw records from UDP, parse them and distribute to CDS and InfluxDB
NET_READER_TEMPLATE = """ ########
net_reader->off:
name: net_reader->off
net_reader->on:
name: net_reader->on
readers: # Read from simulated serial port
- class: UDPReader
kwargs:
port: %RAW_UDP_PORT%
transforms: # Add timestamp and logger label
- class: ParseTransform
kwargs:
metadata_interval: 10
definition_path: %PARSE_DEFINITION_PATH%
writers:
- class: CachedDataWriter
kwargs:
data_server: %DATA_SERVER%
net_reader->on+influx:
name: net_reader->on+influx
readers: # Read from simulated serial port
- class: UDPReader
kwargs:
port: %RAW_UDP_PORT%
transforms: # Add timestamp and logger label
- class: ParseTransform
kwargs:
metadata_interval: 10
definition_path: %PARSE_DEFINITION_PATH%
writers:
- class: CachedDataWriter
kwargs:
data_server: %DATA_SERVER%
- class: InfluxDBWriter
kwargs:
bucket_name: openrvdas
"""
TRUE_WIND_TEMPLATE = """ ########
true_wind->off:
name: true_wind->off
true_wind->on:
name: true_wind->on
readers:
- class: CachedDataReader
kwargs:
data_server: %DATA_SERVER%
subscription:
fields:
S330CourseTrue:
seconds: 0
S330HeadingTrue:
seconds: 0
S330SpeedKt:
seconds: 0
MwxPortRelWindDir:
seconds: 0
MwxPortRelWindSpeed:
seconds: 0
MwxStbdRelWindDir:
seconds: 0
MwxStbdRelWindSpeed:
seconds: 0
writers:
- class: ComposedWriter
kwargs:
transforms:
- class: TrueWindsTransform
kwargs:
apparent_dir_name: PortApparentWindDir
convert_speed_factor: 0.5144
course_field: S330CourseTrue
heading_field: S330HeadingTrue
speed_field: S330SpeedKt
wind_dir_field: MwxPortRelWindDir
wind_speed_field: MwxPortRelWindSpeed
true_dir_name: PortTrueWindDir
true_speed_name: PortTrueWindSpeed
update_on_fields:
- MwxPortRelWindDir
max_field_age:
S330CourseTrue: 15
S330HeadingTrue: 15
S330SpeedKt: 15
MwxPortRelWindDir: 15
MwxPortRelWindSpeed: 15
metadata_interval: 10
writers:
- class: CachedDataWriter
kwargs:
data_server: %DATA_SERVER%
- class: ComposedWriter
kwargs:
transforms:
- class: TrueWindsTransform
kwargs:
apparent_dir_name: StbdApparentWindDir
convert_speed_factor: 0.5144
course_field: S330CourseTrue
heading_field: S330HeadingTrue
speed_field: S330SpeedKt
wind_dir_field: MwxStbdRelWindDir
wind_speed_field: MwxStbdRelWindSpeed
true_dir_name: StbdTrueWindDir
true_speed_name: StbdTrueWindSpeed
update_on_fields:
- MwxStbdRelWindDir
max_field_age:
S330CourseTrue: 15
S330HeadingTrue: 15
S330SpeedKt: 15
MwxStbdRelWindDir: 15
MwxStbdRelWindSpeed: 15
metadata_interval: 10
writers:
- class: CachedDataWriter
kwargs:
data_server: %DATA_SERVER%
true_wind->on+influx:
name: true_wind->on+influx
readers:
- class: CachedDataReader
kwargs:
data_server: %DATA_SERVER%
subscription:
fields:
S330CourseTrue:
seconds: 0
S330HeadingTrue:
seconds: 0
S330SpeedKt:
seconds: 0
MwxPortRelWindDir:
seconds: 0
MwxPortRelWindSpeed:
seconds: 0
MwxStbdRelWindDir:
seconds: 0
MwxStbdRelWindSpeed:
seconds: 0
writers:
- class: ComposedWriter
kwargs:
transforms:
- class: TrueWindsTransform
kwargs:
apparent_dir_name: PortApparentWindDir
convert_speed_factor: 0.5144
course_field: S330CourseTrue
heading_field: S330HeadingTrue
speed_field: S330SpeedKt
wind_dir_field: MwxPortRelWindDir
wind_speed_field: MwxPortRelWindSpeed
true_dir_name: PortTrueWindDir
true_speed_name: PortTrueWindSpeed
update_on_fields:
- MwxPortRelWindDir
max_field_age:
S330CourseTrue: 15
S330HeadingTrue: 15
S330SpeedKt: 15
MwxPortRelWindDir: 15
MwxPortRelWindSpeed: 15
metadata_interval: 10
writers:
- class: CachedDataWriter
kwargs:
data_server: %DATA_SERVER%
- class: InfluxDBWriter
kwargs:
bucket_name: openrvdas
measurement_name: true_wind
- class: ComposedWriter
kwargs:
transforms:
- class: TrueWindsTransform
kwargs:
apparent_dir_name: StbdApparentWindDir
convert_speed_factor: 0.5144
course_field: S330CourseTrue
heading_field: S330HeadingTrue
speed_field: S330SpeedKt
wind_dir_field: MwxStbdRelWindDir
wind_speed_field: MwxStbdRelWindSpeed
true_dir_name: StbdTrueWindDir
true_speed_name: StbdTrueWindSpeed
update_on_fields:
- MwxStbdRelWindDir
max_field_age:
S330CourseTrue: 15
S330HeadingTrue: 15
S330SpeedKt: 15
MwxStbdRelWindDir: 15
MwxStbdRelWindSpeed: 15
metadata_interval: 10
writers:
- class: CachedDataWriter
kwargs:
data_server: %DATA_SERVER%
- class: InfluxDBWriter
kwargs:
bucket_name: openrvdas
measurement_name: true_wind
"""
SNAPSHOT_TEMPLATE = """ ########
# Derived data subsampling logger
snapshot->off:
name: snapshot->off
snapshot->on:
name: snapshot->on
readers:
- class: CachedDataReader
kwargs:
data_server: %DATA_SERVER%
subscription:
fields:
MwxAirTemp:
seconds: 0
RTMPTemp:
seconds: 0
PortTrueWindDir:
seconds: 0
PortTrueWindSpeed:
seconds: 0
StbdTrueWindDir:
seconds: 0
StbdTrueWindSpeed:
seconds: 0
MwxBarometer:
seconds: 0
KnudDepthHF:
seconds: 0
KnudDepthLF:
seconds: 0
Grv1Value:
seconds: 0
transforms:
- class: InterpolationTransform
module: logger.transforms.interpolation_transform
kwargs:
interval: 30
window: 30
metadata_interval: 60 # send metadata every 60 seconds
field_spec:
AvgMwxAirTemp:
source: MwxAirTemp
algorithm:
type: boxcar_average
window: 30
AvgRTMPTemp:
source: RTMPTemp
algorithm:
type: boxcar_average
window: 30
AvgPortTrueWindDir:
source: PortTrueWindDir
algorithm:
type: polar_average
window: 30
AvgPortTrueWindSpeed:
source: PortTrueWindSpeed
algorithm:
type: boxcar_average
window: 30
AvgStbdTrueWindDir:
source: StbdTrueWindDir
algorithm:
type: polar_average
window: 30
AvgStbdTrueWindSpeed:
source: StbdTrueWindSpeed
algorithm:
type: boxcar_average
window: 30
AvgMwxBarometer:
source: MwxBarometer
algorithm:
type: boxcar_average
window: 30
AvgKnudDepthHF:
source: KnudDepthHF
algorithm:
type: boxcar_average
window: 30
AvgKnudDepthLF:
source: KnudDepthLF
algorithm:
type: boxcar_average
window: 30
AvgGrv1Value:
source: Grv1Value
algorithm:
type: boxcar_average
window: 30
writers:
- class: CachedDataWriter
kwargs:
data_server: %DATA_SERVER%
snapshot->on+influx:
name: snapshot->on+influx
readers:
- class: CachedDataReader
kwargs:
data_server: %DATA_SERVER%
subscription:
fields:
MwxAirTemp:
seconds: 0
RTMPTemp:
seconds: 0
PortTrueWindDir:
seconds: 0
PortTrueWindSpeed:
seconds: 0
StbdTrueWindDir:
seconds: 0
StbdTrueWindSpeed:
seconds: 0
MwxBarometer:
seconds: 0
KnudDepthHF:
seconds: 0
KnudDepthLF:
seconds: 0
Grv1Value:
seconds: 0
transforms:
- class: InterpolationTransform
module: logger.transforms.interpolation_transform
kwargs:
interval: 30
window: 30
metadata_interval: 60 # send metadata every 60 seconds
field_spec:
AvgMwxAirTemp:
source: MwxAirTemp
algorithm:
type: boxcar_average
window: 30
AvgRTMPTemp:
source: RTMPTemp
algorithm:
type: boxcar_average
window: 30
AvgPortTrueWindDir:
source: PortTrueWindDir
algorithm:
type: polar_average
window: 30
AvgPortTrueWindSpeed:
source: PortTrueWindSpeed
algorithm:
type: boxcar_average
window: 30
AvgStbdTrueWindDir:
source: StbdTrueWindDir
algorithm:
type: polar_average
window: 30
AvgStbdTrueWindSpeed:
source: StbdTrueWindSpeed
algorithm:
type: boxcar_average
window: 30
AvgMwxBarometer:
source: MwxBarometer
algorithm:
type: boxcar_average
window: 30
AvgKnudDepthHF:
source: KnudDepthHF
algorithm:
type: boxcar_average
window: 30
AvgKnudDepthLF:
source: KnudDepthLF
algorithm:
type: boxcar_average
window: 30
AvgGrv1Value:
source: Grv1Value
algorithm:
type: boxcar_average
window: 30
writers:
- class: CachedDataWriter
kwargs:
data_server: %DATA_SERVER%
- class: InfluxDBWriter
kwargs:
bucket_name: openrvdas
measurement_name: snapshot
"""
####################
def fill_substitutions(template, substitutions):
output = template
for src, dest in substitutions.items():
output = output.replace(str(src), str(dest))
return output
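# e.g. fill_substitutions('id: %CRUISE%', {'%CRUISE%': 'NBP1406'}) == 'id: NBP1406'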
################################################################################
################################################################################
parser = argparse.ArgumentParser()
parser.add_argument('def_filename', metavar='def_filename', type=str,
help='YAML file containing cruise and port specifications')
args = parser.parse_args()
with open(args.def_filename, 'r') as fp:
try:
port_def = yaml.load(fp, Loader=yaml.FullLoader)
except AttributeError:
# Older PyYAML versions don't have FullLoader; fall back to the default loader
port_def = yaml.load(fp)
# Create dict of variables we're going to substitute into the templates
substitutions = {
'%CRUISE%': port_def.get('cruise', {}).get('id'),
'%CRUISE_START%': port_def.get('cruise', {}).get('start'),
'%CRUISE_END%': port_def.get('cruise', {}).get('end'),
'%UDP_DESTINATION%': port_def.get('network', {}).get('destination', '255.255.255.255'),
'%RAW_UDP_PORT%': port_def.get('network', {}).get('raw_udp_port'),
'%PARSED_UDP_PORT%': port_def.get('network', {}).get('parsed_udp_port'),
'%DATA_SERVER%': port_def.get('network', {}).get('data_server'),
'%FILE_ROOT%': port_def.get('file_root', '/var/tmp/log'),
'%PARSE_DEFINITION_PATH%': port_def.get('parse_definition_path', ''),
'%COMMAND_LINE%': ' '.join(sys.argv),
'%DATE_TIME%': time.asctime(time.gmtime()),
'%USER%': getpass.getuser(),
}
loggers = port_def.get('ports').keys()
################################################################################
# Start with header template
output = fill_substitutions(HEADER_TEMPLATE, substitutions)
################################################################################
# Fill in the logger definitions
output += """
########################################
loggers:
"""
LOGGER_DEF = """ %LOGGER%:
configs:
- %LOGGER%->off
- %LOGGER%->net
- %LOGGER%->net/file
"""
for logger in loggers:
output += fill_substitutions(LOGGER_DEF, substitutions).replace('%LOGGER%', logger)
output += """ net_reader:
configs:
- net_reader->off
- net_reader->on
- net_reader->on+influx
"""
output += """ true_wind:
configs:
- true_wind->off
- true_wind->on
- true_wind->on+influx
"""
output += """ snapshot:
configs:
- snapshot->off
- snapshot->on
- snapshot->on+influx
"""
################################################################################
# Fill in mode definitions
output += """
########################################
modes:
'off':
"""
for logger in loggers:
output += ' %LOGGER%: %LOGGER%->off\n'.replace('%LOGGER%', logger)
output += ' net_reader: net_reader->off\n'
output += ' true_wind: true_wind->off\n'
output += ' snapshot: snapshot->off\n'
#### no_write
output += """
no_write:
"""
for logger in loggers:
output += ' %LOGGER%: %LOGGER%->net\n'.replace('%LOGGER%', logger)
output += ' net_reader: net_reader->on\n'
output += ' true_wind: true_wind->on\n'
output += ' snapshot: snapshot->on\n'
#### no_write+influx
output += """
no_write+influx:
"""
for logger in loggers:
output += ' %LOGGER%: %LOGGER%->net\n'.replace('%LOGGER%', logger)
output += ' net_reader: net_reader->on+influx\n'
output += ' true_wind: true_wind->on+influx\n'
output += ' snapshot: snapshot->on+influx\n'
#### write
output += """
write:
"""
for logger in loggers:
output += ' %LOGGER%: %LOGGER%->net/file\n'.replace('%LOGGER%', logger)
output += ' net_reader: net_reader->on\n'
output += ' true_wind: true_wind->on\n'
output += ' snapshot: snapshot->on\n'
#### write+influx
output += """
write+influx:
"""
for logger in loggers:
output += ' %LOGGER%: %LOGGER%->net/file\n'.replace('%LOGGER%', logger)
output += ' net_reader: net_reader->on+influx\n'
output += ' true_wind: true_wind->on+influx\n'
output += ' snapshot: snapshot->on+influx\n'
output += """
########################################
default_mode: 'off'
"""
################################################################################
# Now output configs
output += """
########################################
configs:
"""
for logger in loggers:
logger_port_def = port_def.get('ports').get(logger).get('port_tab')
if not logger_port_def:
logging.warning('No port def for %s; skipping', logger)
continue
(inst, tty, baud, datab, stopb, parity, igncr, icrnl, eol, onlcr,
ocrnl, icanon, vmin, vtime, vintr, vquit, opost) = logger_port_def.split()
logger_def = fill_substitutions(LOGGER_TEMPLATE, substitutions)
logger_def = logger_def.replace('%LOGGER%', logger)
logger_def = logger_def.replace('%TTY%', tty)
logger_def = logger_def.replace('%BAUD%', baud)
output += logger_def
output += fill_substitutions(NET_READER_TEMPLATE, substitutions)
# Add in the true wind and snapshot configurations
output += fill_substitutions(TRUE_WIND_TEMPLATE, substitutions)
output += fill_substitutions(SNAPSHOT_TEMPLATE, substitutions)
print(output)
| 28.874063
| 89
| 0.554494
|
314d8b77d3ba2377f8f9060bee4f82ca28277b23
| 1,314
|
py
|
Python
|
app.py
|
gabrielbazan/http_auth
|
5d86669ead6beedd8d980031d09d7dedfc1033fd
|
[
"MIT"
] | null | null | null |
app.py
|
gabrielbazan/http_auth
|
5d86669ead6beedd8d980031d09d7dedfc1033fd
|
[
"MIT"
] | null | null | null |
app.py
|
gabrielbazan/http_auth
|
5d86669ead6beedd8d980031d09d7dedfc1033fd
|
[
"MIT"
] | null | null | null |
from functools import wraps
from flask import Flask, request, jsonify
from authentication import AuthenticatorFactory, Token, AuthenticationException
app = Flask(__name__)
# This is an example, so we declare it here. You should store it elsewhere
user_id = None
@app.errorhandler(AuthenticationException)
def error(e):
return jsonify(dict(message='Authentication failure', detail=str(e))), 401
def protected(f):
@wraps(f)
def decorated(*args, **kwargs):
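# Expect "Authorization: <method> <credentials>", i.e. two space-separated parts.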
header = request.headers.get('Authorization', '').split(' ')
if len(header) != 2:
raise AuthenticationException('Wrong credentials')
method, value = header
user = AuthenticatorFactory.create(method).authenticate(value)
if not user:
raise AuthenticationException('Wrong credentials')
# Do something with the user
global user_id
user_id = user
return f(*args, **kwargs)
return decorated
@app.route('/secret', methods=['GET'])
@protected
def hello():
return jsonify(dict(message='You are in!')), 200
@app.route('/tokens', methods=['POST'])
@protected
def authenticate():
print(user_id)
return jsonify(dict(token=Token.generate(user_id))), 201
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5050, debug=True)
| 23.890909
| 79
| 0.67656
|
d337436f0559791c01be429314ece6bc54eb8c8e
| 2,937
|
py
|
Python
|
cloudroast/images/v2/functional/test_get_versions.py
|
bhushan5/cloudroast
|
60e05df96fe50b3bc511ee1cf1c818329d4360a1
|
[
"Apache-2.0"
] | null | null | null |
cloudroast/images/v2/functional/test_get_versions.py
|
bhushan5/cloudroast
|
60e05df96fe50b3bc511ee1cf1c818329d4360a1
|
[
"Apache-2.0"
] | null | null | null |
cloudroast/images/v2/functional/test_get_versions.py
|
bhushan5/cloudroast
|
60e05df96fe50b3bc511ee1cf1c818329d4360a1
|
[
"Apache-2.0"
] | null | null | null |
"""
Copyright 2014 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from cafe.drivers.unittest.datasets import DatasetList
from cafe.drivers.unittest.decorators import (
data_driven_test, DataDrivenFixture, tags)
from cloudroast.images.fixtures import ImagesFixture
class VersionsDatasetList(DatasetList):
def __init__(self):
self.append_new_dataset('url_with_no_backslash', {'url_addition': ''})
self.append_new_dataset('url_with_backslash', {'url_addition': '/'})
self.append_new_dataset('url_with_versions_and_no_backslash',
{'url_addition': '/versions'})
self.append_new_dataset('url_with_versions_and_backslash',
{'url_addition': '/versions/'})
@DataDrivenFixture
class TestGetVersions(ImagesFixture):
@tags(type='smoke')
@data_driven_test(VersionsDatasetList())
def ddtest_get_versions(self, url_addition):
"""
@summary: Get all versions using each possible url
1) Get versions of Cloud Images using each url
2) Verify that the response code is 300
3) Verify that the number of versions returned is as expected
4) Verify that each version returned contains the correct parameters
and values
"""
versions_data = self.get_comparison_data(
self.images_config.versions_data)
errors = []
response = self.images_client.get_versions(url_addition)
self.assertEqual(response.status_code, 300)
list_versions = response.entity
self.assertEqual(len(list_versions), len(versions_data))
for version in list_versions:
version_data = versions_data[version.id_]
if version.links[0].href != version_data.get('href'):
errors.append(self.error_msg.format(
'href', version_data.get('href'), version.links[0].href))
if version.links[0].rel != version_data.get('rel'):
errors.append(self.error_msg.format(
'rel', version_data.get('rel'), version.links[0].rel))
if version.status.lower() != version_data.get('status').lower():
errors.append(self.error_msg.format(
'status', version_data.get('status').lower(),
version.status.lower()))
self.maxDiff = None
self.assertListEqual(errors, [])
| 39.16
| 78
| 0.670752
|
f26067e1bf60965e4cdf78886e9a27d84c7b80aa
| 16,304
|
py
|
Python
|
handleExcel/exportor/scripts/xlrd-1.0.0/scripts/runxlrd.py
|
Jack301/toolkit
|
2d1e37a36281af7722be93d4d5299c1ce8b7f365
|
[
"MIT"
] | 95
|
2016-09-30T02:55:26.000Z
|
2022-01-12T05:47:30.000Z
|
handleExcel/exportor/scripts/xlrd-1.0.0/scripts/runxlrd.py
|
Jack301/toolkit
|
2d1e37a36281af7722be93d4d5299c1ce8b7f365
|
[
"MIT"
] | 5
|
2019-06-27T19:02:18.000Z
|
2019-08-07T07:16:49.000Z
|
handleExcel/exportor/scripts/xlrd-1.0.0/scripts/runxlrd.py
|
Jack301/toolkit
|
2d1e37a36281af7722be93d4d5299c1ce8b7f365
|
[
"MIT"
] | 32
|
2017-03-28T06:45:09.000Z
|
2021-12-21T10:33:10.000Z
|
#!/usr/bin/env python
# Copyright (c) 2005-2012 Stephen John Machin, Lingfo Pty Ltd
# This script is part of the xlrd package, which is released under a
# BSD-style licence.
from __future__ import print_function
cmd_doc = """
Commands:
2rows Print the contents of first and last row in each sheet
3rows Print the contents of first, second and last row in each sheet
bench Same as "show", but doesn't print -- for profiling
biff_count[1] Print a count of each type of BIFF record in the file
biff_dump[1] Print a dump (char and hex) of the BIFF records in the file
fonts hdr + print a dump of all font objects
hdr Mini-overview of file (no per-sheet information)
hotshot Do a hotshot profile run e.g. ... -f1 hotshot bench bigfile*.xls
labels Dump of sheet.col_label_ranges and ...row... for each sheet
name_dump Dump of each object in book.name_obj_list
names Print brief information for each NAME record
ov Overview of file
profile Like "hotshot", but uses cProfile
show Print the contents of all rows in each sheet
version[0] Print versions of xlrd and Python and exit
xfc Print "XF counts" and cell-type counts -- see code for details
[0] means no file arg
[1] means only one file arg i.e. no glob.glob pattern
"""
options = None
if __name__ == "__main__":
PSYCO = 0
import xlrd
import sys, time, glob, traceback, gc
from xlrd.timemachine import xrange, REPR
class LogHandler(object):
def __init__(self, logfileobj):
self.logfileobj = logfileobj
self.fileheading = None
self.shown = 0
def setfileheading(self, fileheading):
self.fileheading = fileheading
self.shown = 0
def write(self, text):
if self.fileheading and not self.shown:
self.logfileobj.write(self.fileheading)
self.shown = 1
self.logfileobj.write(text)
null_cell = xlrd.empty_cell
def show_row(bk, sh, rowx, colrange, printit):
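# with ragged_rows, a row may be shorter than sheet.ncols, so size colrange per row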
if bk.ragged_rows:
colrange = range(sh.row_len(rowx))
if not colrange: return
if printit: print()
if bk.formatting_info:
for colx, ty, val, cxfx in get_row_data(bk, sh, rowx, colrange):
if printit:
print("cell %s%d: type=%d, data: %r, xfx: %s"
% (xlrd.colname(colx), rowx+1, ty, val, cxfx))
else:
for colx, ty, val, _unused in get_row_data(bk, sh, rowx, colrange):
if printit:
print("cell %s%d: type=%d, data: %r" % (xlrd.colname(colx), rowx+1, ty, val))
def get_row_data(bk, sh, rowx, colrange):
result = []
dmode = bk.datemode
ctys = sh.row_types(rowx)
cvals = sh.row_values(rowx)
for colx in colrange:
cty = ctys[colx]
cval = cvals[colx]
if bk.formatting_info:
cxfx = str(sh.cell_xf_index(rowx, colx))
else:
cxfx = ''
if cty == xlrd.XL_CELL_DATE:
try:
showval = xlrd.xldate_as_tuple(cval, dmode)
except xlrd.XLDateError as e:
showval = "%s:%s" % (type(e).__name__, e)
cty = xlrd.XL_CELL_ERROR
elif cty == xlrd.XL_CELL_ERROR:
showval = xlrd.error_text_from_code.get(cval, '<Unknown error code 0x%02x>' % cval)
else:
showval = cval
result.append((colx, cty, showval, cxfx))
return result
def bk_header(bk):
print()
print("BIFF version: %s; datemode: %s"
% (xlrd.biff_text_from_num[bk.biff_version], bk.datemode))
print("codepage: %r (encoding: %s); countries: %r"
% (bk.codepage, bk.encoding, bk.countries))
print("Last saved by: %r" % bk.user_name)
print("Number of data sheets: %d" % bk.nsheets)
print("Use mmap: %d; Formatting: %d; On demand: %d"
% (bk.use_mmap, bk.formatting_info, bk.on_demand))
print("Ragged rows: %d" % bk.ragged_rows)
if bk.formatting_info:
print("FORMATs: %d, FONTs: %d, XFs: %d"
% (len(bk.format_list), len(bk.font_list), len(bk.xf_list)))
if not options.suppress_timing:
print("Load time: %.2f seconds (stage 1) %.2f seconds (stage 2)"
% (bk.load_time_stage_1, bk.load_time_stage_2))
print()
def show_fonts(bk):
print("Fonts:")
for x in xrange(len(bk.font_list)):
font = bk.font_list[x]
font.dump(header='== Index %d ==' % x, indent=4)
def show_names(bk, dump=0):
bk_header(bk)
if bk.biff_version < 50:
print("Names not extracted in this BIFF version")
return
nlist = bk.name_obj_list
print("Name list: %d entries" % len(nlist))
for nobj in nlist:
if dump:
nobj.dump(sys.stdout,
header="\n=== Dump of name_obj_list[%d] ===" % nobj.name_index)
else:
print("[%d]\tName:%r macro:%r scope:%d\n\tresult:%r\n"
% (nobj.name_index, nobj.name, nobj.macro, nobj.scope, nobj.result))
def print_labels(sh, labs, title):
if not labs: return
for rlo, rhi, clo, chi in labs:
print("%s label range %s:%s contains:"
% (title, xlrd.cellname(rlo, clo), xlrd.cellname(rhi-1, chi-1)))
for rx in xrange(rlo, rhi):
for cx in xrange(clo, chi):
print(" %s: %r" % (xlrd.cellname(rx, cx), sh.cell_value(rx, cx)))
def show_labels(bk):
# bk_header(bk)
hdr = 0
for shx in range(bk.nsheets):
sh = bk.sheet_by_index(shx)
clabs = sh.col_label_ranges
rlabs = sh.row_label_ranges
if clabs or rlabs:
if not hdr:
bk_header(bk)
hdr = 1
print("sheet %d: name = %r; nrows = %d; ncols = %d" %
(shx, sh.name, sh.nrows, sh.ncols))
print_labels(sh, clabs, 'Col')
print_labels(sh, rlabs, 'Row')
if bk.on_demand: bk.unload_sheet(shx)
def show(bk, nshow=65535, printit=1):
bk_header(bk)
if 0:
rclist = xlrd.sheet.rc_stats.items()
rclist = sorted(rclist)
print("rc stats")
for k, v in rclist:
print("0x%04x %7d" % (k, v))
if options.onesheet:
try:
shx = int(options.onesheet)
except ValueError:
shx = bk.sheet_by_name(options.onesheet).number
shxrange = [shx]
else:
shxrange = range(bk.nsheets)
# print("shxrange", list(shxrange))
for shx in shxrange:
sh = bk.sheet_by_index(shx)
nrows, ncols = sh.nrows, sh.ncols
colrange = range(ncols)
anshow = min(nshow, nrows)
print("sheet %d: name = %s; nrows = %d; ncols = %d" %
(shx, REPR(sh.name), sh.nrows, sh.ncols))
if nrows and ncols:
# Beat the bounds
for rowx in xrange(nrows):
nc = sh.row_len(rowx)
if nc:
_junk = sh.row_types(rowx)[nc-1]
_junk = sh.row_values(rowx)[nc-1]
_junk = sh.cell(rowx, nc-1)
for rowx in xrange(anshow-1):
if not printit and rowx % 10000 == 1 and rowx > 1:
print("done %d rows" % (rowx-1,))
show_row(bk, sh, rowx, colrange, printit)
if anshow and nrows:
show_row(bk, sh, nrows-1, colrange, printit)
print()
if bk.on_demand: bk.unload_sheet(shx)
def count_xfs(bk):
bk_header(bk)
for shx in range(bk.nsheets):
sh = bk.sheet_by_index(shx)
nrows, ncols = sh.nrows, sh.ncols
print("sheet %d: name = %r; nrows = %d; ncols = %d" %
(shx, sh.name, sh.nrows, sh.ncols))
# Access all xfindexes to force gathering stats
type_stats = [0, 0, 0, 0, 0, 0, 0]
for rowx in xrange(nrows):
for colx in xrange(sh.row_len(rowx)):
xfx = sh.cell_xf_index(rowx, colx)
assert xfx >= 0
cty = sh.cell_type(rowx, colx)
type_stats[cty] += 1
print("XF stats", sh._xf_index_stats)
print("type stats", type_stats)
print()
if bk.on_demand: bk.unload_sheet(shx)
def main(cmd_args):
import optparse
global options, PSYCO
usage = "\n%prog [options] command [input-file-patterns]\n" + cmd_doc
oparser = optparse.OptionParser(usage)
oparser.add_option(
"-l", "--logfilename",
default="",
help="contains error messages")
oparser.add_option(
"-v", "--verbosity",
type="int", default=0,
help="level of information and diagnostics provided")
oparser.add_option(
"-m", "--mmap",
type="int", default=-1,
help="1: use mmap; 0: don't use mmap; -1: accept heuristic")
oparser.add_option(
"-e", "--encoding",
default="",
help="encoding override")
oparser.add_option(
"-f", "--formatting",
type="int", default=0,
help="0 (default): no fmt info\n"
"1: fmt info (all cells)\n"
)
oparser.add_option(
"-g", "--gc",
type="int", default=0,
help="0: auto gc enabled; 1: auto gc disabled, manual collect after each file; 2: no gc")
oparser.add_option(
"-s", "--onesheet",
default="",
help="restrict output to this sheet (name or index)")
oparser.add_option(
"-u", "--unnumbered",
action="store_true", default=0,
help="omit line numbers or offsets in biff_dump")
oparser.add_option(
"-d", "--on-demand",
action="store_true", default=0,
help="load sheets on demand instead of all at once")
oparser.add_option(
"-t", "--suppress-timing",
action="store_true", default=0,
help="don't print timings (diffs are less messy)")
oparser.add_option(
"-r", "--ragged-rows",
action="store_true", default=0,
help="open_workbook(..., ragged_rows=True)")
options, args = oparser.parse_args(cmd_args)
if len(args) == 1 and args[0] in ("version", ):
pass
elif len(args) < 2:
oparser.error("Expected at least 2 args, found %d" % len(args))
cmd = args[0]
xlrd_version = getattr(xlrd, "__VERSION__", "unknown; before 0.5")
if cmd == 'biff_dump':
xlrd.dump(args[1], unnumbered=options.unnumbered)
sys.exit(0)
if cmd == 'biff_count':
xlrd.count_records(args[1])
sys.exit(0)
if cmd == 'version':
print("xlrd: %s, from %s" % (xlrd_version, xlrd.__file__))
print("Python:", sys.version)
sys.exit(0)
if options.logfilename:
logfile = LogHandler(open(options.logfilename, 'w'))
else:
logfile = sys.stdout
mmap_opt = options.mmap
mmap_arg = xlrd.USE_MMAP
if mmap_opt in (1, 0):
mmap_arg = mmap_opt
elif mmap_opt != -1:
print('Unexpected value (%r) for mmap option -- assuming default' % mmap_opt)
fmt_opt = options.formatting | (cmd in ('xfc', ))
gc_mode = options.gc
if gc_mode:
gc.disable()
for pattern in args[1:]:
for fname in glob.glob(pattern):
print("\n=== File: %s ===" % fname)
if logfile != sys.stdout:
logfile.setfileheading("\n=== File: %s ===\n" % fname)
if gc_mode == 1:
n_unreachable = gc.collect()
if n_unreachable:
print("GC before open:", n_unreachable, "unreachable objects")
if PSYCO:
import psyco
psyco.full()
PSYCO = 0
try:
t0 = time.time()
bk = xlrd.open_workbook(fname,
verbosity=options.verbosity, logfile=logfile,
use_mmap=mmap_arg,
encoding_override=options.encoding,
formatting_info=fmt_opt,
on_demand=options.on_demand,
ragged_rows=options.ragged_rows,
)
t1 = time.time()
if not options.suppress_timing:
print("Open took %.2f seconds" % (t1-t0,))
except xlrd.XLRDError as e:
print("*** Open failed: %s: %s" % (type(e).__name__, e))
continue
except KeyboardInterrupt:
print("*** KeyboardInterrupt ***")
traceback.print_exc(file=sys.stdout)
sys.exit(1)
except BaseException as e:
print("*** Open failed: %s: %s" % (type(e).__name__, e))
traceback.print_exc(file=sys.stdout)
continue
t0 = time.time()
if cmd == 'hdr':
bk_header(bk)
elif cmd == 'ov': # OverView
show(bk, 0)
elif cmd == 'show': # all rows
show(bk)
elif cmd == '2rows': # first row and last row
show(bk, 2)
elif cmd == '3rows': # first row, 2nd row and last row
show(bk, 3)
elif cmd == 'bench':
show(bk, printit=0)
elif cmd == 'fonts':
bk_header(bk)
show_fonts(bk)
elif cmd == 'names': # named reference list
show_names(bk)
elif cmd == 'name_dump': # named reference list
show_names(bk, dump=1)
elif cmd == 'labels':
show_labels(bk)
elif cmd == 'xfc':
count_xfs(bk)
else:
print("*** Unknown command <%s>" % cmd)
sys.exit(1)
del bk
if gc_mode == 1:
n_unreachable = gc.collect()
if n_unreachable:
print("GC post cmd:", fname, "->", n_unreachable, "unreachable objects")
if not options.suppress_timing:
t1 = time.time()
print("\ncommand took %.2f seconds\n" % (t1-t0,))
return None
av = sys.argv[1:]
if not av:
main(av)
firstarg = av[0].lower()
if firstarg == "hotshot":
import hotshot, hotshot.stats
av = av[1:]
prof_log_name = "XXXX.prof"
prof = hotshot.Profile(prof_log_name)
# benchtime, result = prof.runcall(main, *av)
result = prof.runcall(main, *(av, ))
print("result", repr(result))
prof.close()
stats = hotshot.stats.load(prof_log_name)
stats.strip_dirs()
stats.sort_stats('time', 'calls')
stats.print_stats(20)
elif firstarg == "profile":
import cProfile
av = av[1:]
cProfile.run('main(av)', 'YYYY.prof')
import pstats
p = pstats.Stats('YYYY.prof')
p.strip_dirs().sort_stats('cumulative').print_stats(30)
elif firstarg == "psyco":
PSYCO = 1
main(av[1:])
else:
main(av)
| 39.286747
| 101
| 0.50276
|
6aff2f810447a6eef6b2eab696fdac584f35e0f9
| 461
|
py
|
Python
|
src/main/python/gemini/beam_pydss_processing.py
|
cday97/beam
|
7e1ab50eecaefafd04daab360f8b12bc7cab559b
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2021-06-23T00:19:42.000Z
|
2021-06-23T00:19:42.000Z
|
src/main/python/gemini/beam_pydss_processing.py
|
cday97/beam
|
7e1ab50eecaefafd04daab360f8b12bc7cab559b
|
[
"BSD-3-Clause-LBNL"
] | 3
|
2020-12-10T19:12:31.000Z
|
2020-12-11T16:01:43.000Z
|
src/main/python/gemini/beam_pydss_processing.py
|
cday97/beam
|
7e1ab50eecaefafd04daab360f8b12bc7cab559b
|
[
"BSD-3-Clause-LBNL"
] | 1
|
2022-03-21T19:39:32.000Z
|
2022-03-21T19:39:32.000Z
|
import os
basefolder = "/Users/haitamlaarabi/Data/GEMINI/2021March22/370k-warmstart/output"
beamLog_out = "{}/beamLog.out".format(basefolder)
beamLog_out_csv = "{}/beamLog.csv".format(basefolder)
file1 = open(beamLog_out, 'r')
Lines = file1.readlines()
file2 = open(beamLog_out_csv, 'w')
# Copy only the lines containing the "DELETE-THIS-" marker into the CSV
for line in Lines:
if "DELETE-THIS-" in line:
file2.writelines(line)
print(line)
file1.close()
file2.close()
print("END")
| 28.8125
| 81
| 0.720174
|
2a12fd47548ccd8bef8780c2789c325774728994
| 580
|
py
|
Python
|
django_shares/views/auth.py
|
InfoAgeTech/django-shares
|
1b301852fa261a7eb6c872dc912517368da6cb33
|
[
"MIT"
] | null | null | null |
django_shares/views/auth.py
|
InfoAgeTech/django-shares
|
1b301852fa261a7eb6c872dc912517368da6cb33
|
[
"MIT"
] | null | null | null |
django_shares/views/auth.py
|
InfoAgeTech/django-shares
|
1b301852fa261a7eb6c872dc912517368da6cb33
|
[
"MIT"
] | null | null | null |
from django.core.exceptions import PermissionDenied
class ShareRequiredViewMixin(object):
"""Share mixin that ensures the authenticated user has a share to the
object being viewed or get a permission denied.
This method assumes the following mixin has already been called:
* django_shares.mixins.views.SharedObjectSharesViewMixin
"""
def dispatch(self, *args, **kwargs):
if not getattr(self, 'shared_object_user_share', None):
raise PermissionDenied
return super(ShareRequiredViewMixin, self).dispatch(*args, **kwargs)
| 34.117647
| 76
| 0.72931
|
744765130def8a1b28ebe4cc5cac3ffa1667b884
| 1,916
|
py
|
Python
|
src/keyboard.py
|
coders-creed/botathon
|
b98e6302d43cd1bcad5c79fbb3d8b02b3cfab2ae
|
[
"MIT"
] | 1
|
2017-02-06T22:32:05.000Z
|
2017-02-06T22:32:05.000Z
|
src/keyboard.py
|
coders-creed/botathon
|
b98e6302d43cd1bcad5c79fbb3d8b02b3cfab2ae
|
[
"MIT"
] | null | null | null |
src/keyboard.py
|
coders-creed/botathon
|
b98e6302d43cd1bcad5c79fbb3d8b02b3cfab2ae
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# @Author: chandan
# @Date: 2016-12-11 09:49:42
# @Last Modified by: chandan
# @Last Modified time: 2016-12-11 09:50:08
#!/usr/bin/env python
#
# Basic example for a bot that uses inline keyboards.
# This program is dedicated to the public domain under the CC0 license.
import logging
from telegram import InlineKeyboardButton, InlineKeyboardMarkup
from telegram.ext import Updater, CommandHandler, CallbackQueryHandler
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO)
def start(bot, update):
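# each inner list is one row of buttons: two on the first row, one on the second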
keyboard = [[InlineKeyboardButton("Option 1", callback_data='1'),
InlineKeyboardButton("Option 2", callback_data='2')],
[InlineKeyboardButton("Option 3", callback_data='3')]]
reply_markup = InlineKeyboardMarkup(keyboard)
update.message.reply_text('Please choose:', reply_markup=reply_markup)
def button(bot, update):
query = update.callback_query
bot.editMessageText(text="Selected option: %s" % query.data,
chat_id=query.message.chat_id,
message_id=query.message.message_id)
def help(bot, update):
update.message.reply_text("Use /start to test this bot.")
def error(bot, update, error):
logging.warning('Update "%s" caused error "%s"' % (update, error))
# Create the Updater and pass it your bot's token.
API = '321593047:AAHu3OSD71i8II0exHJGFTkVIxwOtvEGJlo'
updater = Updater(token=API)
updater.dispatcher.add_handler(CommandHandler('start', start))
updater.dispatcher.add_handler(CallbackQueryHandler(button))
updater.dispatcher.add_handler(CommandHandler('help', help))
updater.dispatcher.add_error_handler(error)
# Start the Bot
updater.start_polling()
# Run the bot until the user presses Ctrl-C or the process receives SIGINT,
# SIGTERM or SIGABRT
updater.idle()
| 31.409836
| 82
| 0.70929
|
1ec94519e285dcf3f19f57935728514ea81581b6
| 3,759
|
py
|
Python
|
tests/servers/test_windows.py
|
ginjeni1/mountequist
|
795d6921e41be8c3097c7c4a06f0ad0a1e4742b5
|
[
"MIT"
] | null | null | null |
tests/servers/test_windows.py
|
ginjeni1/mountequist
|
795d6921e41be8c3097c7c4a06f0ad0a1e4742b5
|
[
"MIT"
] | null | null | null |
tests/servers/test_windows.py
|
ginjeni1/mountequist
|
795d6921e41be8c3097c7c4a06f0ad0a1e4742b5
|
[
"MIT"
] | null | null | null |
import os
import subprocess
import sys
import pytest
from mountequist import exceptions
from mountequist.servers import windows
from mountequist.util import get_root_mountebank_path
from tests.defaults import DEFAULT_TEST_PATH
from tests.helpers import is_process_active
windows_only = pytest.mark.skipif(sys.platform != "win32", reason="Windows Only")
@windows_only
def test_can_find_config(mark_for_removal, mountebank_install):
config_file_path = os.path.join(mountebank_install, "test.json")
with open(config_file_path, "w") as temp_file:
temp_file.write("TEST")
mark_for_removal(config_file_path)
result = windows.WindowsServer._find_config(mountebank_install, "test.json")
assert result == config_file_path
@windows_only
def test_properly_prepares_basic_argument(mountebank_install):
args = windows.WindowsServer._prepare_arguments(
mountebank_install,
local_host_only=False)
assert args[0] == os.path.join(mountebank_install, windows.BIN_FILE)
assert len(args) == 1
@windows_only
def test_properly_prepares_all_arguments(mark_for_removal, mountebank_install):
config_file_path = os.path.join(mountebank_install, "test")
with open(config_file_path, "w") as temp_file:
temp_file.write("TEST")
mark_for_removal(config_file_path)
args = windows.WindowsServer._prepare_arguments(
mountebank_path=mountebank_install,
config_file_name="test",
port=3636)
assert args[0] == os.path.join(mountebank_install, windows.BIN_FILE)
assert "--configfile test" in args
assert "--localOnly" in args
assert "--port 3636" in args
assert len(args) == 4
@pytest.mark.skip('The process still seems active after the server is stopped.')
@windows_only
def test_can_start_and_stop_mountebank(mountebank_install):
server = windows.WindowsServer(DEFAULT_TEST_PATH)
with server:
pid = server.process.pid
assert server.process is not None
assert server.process.poll() is None
assert server.process is None
assert is_process_active(pid) is False
@windows_only
def test_can_find_bin_file(mountebank_install):
server = windows.WindowsServer(DEFAULT_TEST_PATH)
with server:
assert "mountebank/bin/mb" in server._find_bin_file(server.mountebank_path)
@windows_only
def test_can_save_config(mountebank_install):
server = windows.WindowsServer(DEFAULT_TEST_PATH)
with server:
server.save_config_file("TEST.json")
assert server._find_config(server.mountebank_path, "TEST.json")
@windows_only
def test_can_load_new_config(mountebank_install, mark_for_removal, sample_config):
root = get_root_mountebank_path(DEFAULT_TEST_PATH)
config_file_path = os.path.join(root, "example.json")
mark_for_removal(config_file_path)
with open(config_file_path, 'w') as config_file:
config_file.write(sample_config)
server = windows.WindowsServer(DEFAULT_TEST_PATH)
with server:
server.load_config_file("example.json")
assert server.process.poll() is None
@windows_only
def test_succeeds_polling():
process = subprocess.Popen("cmd")
return_code = windows.WindowsServer._poll_and_timeout(process)
assert return_code == 0
@windows_only
def test_properly_times_out_polling():
process = subprocess.Popen("cmd /K")
with pytest.raises(exceptions.TimeoutError):
windows.WindowsServer._poll_and_timeout(process, 0.1)
process.kill()
@pytest.fixture
def sample_config(mark_for_removal):
return ('{"imposters": [{"protocol": "http","port": 61486,'
'"stubs": [{"responses": [{"is": {"body": "All Ok","headers":'
'{"Connection": "close"},"_mode": "text","statusCode": 200}}]}]}]}')
| 31.588235
| 83
| 0.741687
|
9ca4ed668cc77b9cf2f4da33a0e6899b47ca5a3c
| 457
|
py
|
Python
|
hc/accounts/migrations/0004_profile_api_key.py
|
andela/healthchecks_aphas
|
a08f9208f569f6bd3e51ef40b9e95dfc675705a1
|
[
"BSD-3-Clause"
] | 3
|
2017-12-14T03:23:59.000Z
|
2021-07-07T13:19:32.000Z
|
hc/accounts/migrations/0004_profile_api_key.py
|
andela/healthchecks_aphas
|
a08f9208f569f6bd3e51ef40b9e95dfc675705a1
|
[
"BSD-3-Clause"
] | 34
|
2019-09-05T06:41:12.000Z
|
2021-06-25T15:25:28.000Z
|
hc/accounts/migrations/0004_profile_api_key.py
|
andela/healthchecks_aphas
|
a08f9208f569f6bd3e51ef40b9e95dfc675705a1
|
[
"BSD-3-Clause"
] | 30
|
2017-04-22T07:09:56.000Z
|
2019-06-30T08:24:01.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-02-16 12:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0003_profile_token'),
]
operations = [
migrations.AddField(
model_name='profile',
name='api_key',
field=models.CharField(blank=True, max_length=128),
),
]
| 21.761905
| 63
| 0.617068
|
a99747d7619ca494ac6e0807a7fe585a4ff56ce8
| 282
|
py
|
Python
|
backend/app/main.py
|
uedaeita/bestsell
|
b452017168aae10799d0b8f4a8f2dd83b39635d0
|
[
"MIT"
] | null | null | null |
backend/app/main.py
|
uedaeita/bestsell
|
b452017168aae10799d0b8f4a8f2dd83b39635d0
|
[
"MIT"
] | null | null | null |
backend/app/main.py
|
uedaeita/bestsell
|
b452017168aae10799d0b8f4a8f2dd83b39635d0
|
[
"MIT"
] | null | null | null |
from typing import Dict
from fastapi import FastAPI
from app.api.v1.api import api_router
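# root_path assumes the app is served behind a proxy under /bestsell/api (affects docs/OpenAPI URLs)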
app = FastAPI(
title="BestSell",
root_path="/bestsell/api",
)
@app.get("/")
def health_check() -> Dict[str, str]:
return {"version": "1.0.0"}
app.include_router(api_router)
| 14.842105
| 37
| 0.680851
|
9a8b40cb263b3d9d1074b43df307b0f5347e0b72
| 3,940
|
py
|
Python
|
kubernetes_asyncio/client/models/v1alpha1_group_subject.py
|
icamposrivera/kubernetes_asyncio
|
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes_asyncio/client/models/v1alpha1_group_subject.py
|
icamposrivera/kubernetes_asyncio
|
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
|
[
"Apache-2.0"
] | null | null | null |
kubernetes_asyncio/client/models/v1alpha1_group_subject.py
|
icamposrivera/kubernetes_asyncio
|
f028cc793e3a2c519be6a52a49fb77ff0b014c9b
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.19.15
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes_asyncio.client.configuration import Configuration
class V1alpha1GroupSubject(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'name': 'str'
}
attribute_map = {
'name': 'name'
}
def __init__(self, name=None, local_vars_configuration=None): # noqa: E501
"""V1alpha1GroupSubject - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._name = None
self.discriminator = None
self.name = name
@property
def name(self):
"""Gets the name of this V1alpha1GroupSubject. # noqa: E501
name is the user group that matches, or \"*\" to match all user groups. See https://github.com/kubernetes/apiserver/blob/master/pkg/authentication/user/user.go for some well-known group names. Required. # noqa: E501
:return: The name of this V1alpha1GroupSubject. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this V1alpha1GroupSubject.
name is the user group that matches, or \"*\" to match all user groups. See https://github.com/kubernetes/apiserver/blob/master/pkg/authentication/user/user.go for some well-known group names. Required. # noqa: E501
:param name: The name of this V1alpha1GroupSubject. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and name is None: # noqa: E501
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1alpha1GroupSubject):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1alpha1GroupSubject):
return True
return self.to_dict() != other.to_dict()
| 31.774194
| 224
| 0.601523
|
440bb53284285a6e49f9a59028777474b4773663
| 4,936
|
py
|
Python
|
herokuapp/main.py
|
Quuxplusone/cwg-issue-browser
|
f4c245d59ae3518865fc6c46f788f2759b6bf346
|
[
"MIT"
] | 1
|
2019-05-28T00:14:31.000Z
|
2019-05-28T00:14:31.000Z
|
herokuapp/main.py
|
Quuxplusone/cwg-issue-browser
|
f4c245d59ae3518865fc6c46f788f2759b6bf346
|
[
"MIT"
] | null | null | null |
herokuapp/main.py
|
Quuxplusone/cwg-issue-browser
|
f4c245d59ae3518865fc6c46f788f2759b6bf346
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import bottle
from bottle import Bottle
import os
import re
import requests
import time
app = Bottle()
bottle.TEMPLATE_PATH.insert(0, os.path.join(os.path.dirname(os.path.realpath(__file__)), 'views'))
class NoSuchIssueException(Exception):
def __init__(self, n):
self.issue_number = n
class UpstreamNotParseableException(Exception):
def __init__(self, r, url):
self.r = r
self.status_code = r.status_code
self.text = r.text
self.url = url
class UpstreamUnreachableException(Exception):
def __init__(self, r, url):
self.r = r
self.status_code = r.status_code
self.text = r.text
self.url = url
def get_full_cwg_page(url):
r = requests.get(url)
text = r.text
if r.status_code != 200:
raise UpstreamUnreachableException(r, url)
if not text.startswith('<HTML>'):
raise UpstreamNotParseableException(r, url)
return text
def get_snippet(page_text, issue_id):
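# return the HTML between this issue's <A NAME=...> anchor and the next anchor (or end of page)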
anchor_tag = '<A NAME="%s">' % issue_id
try:
start_idx = page_text.index(anchor_tag)
except ValueError:
raise NoSuchIssueException(issue_id)
try:
end_idx = page_text.index('<A NAME="', start_idx + 1)
except ValueError:
end_idx = len(page_text)
return page_text[start_idx:end_idx]
class Issue:
def __init__(self, status, text):
self.status = status
self.text = text
class PageCache:
def __init__(self, urls):
self.issues = {} # str(id) -> Issue
self.url_sizes = {} # str(url) -> int
self.last_fetch_time = 0
self.urls = urls
def refresh(self):
for url in self.urls:
status = 'active' if 'active' in url else 'closed' if 'closed' in url else 'defect'
try:
page_text = get_full_cwg_page(url)
self.url_sizes[url] = len(page_text)
issue_ids = [m.group(1) for m in re.finditer(r'<A NAME="(\d+)">', page_text)]
for issue_id in issue_ids:
self.issues[issue_id] = Issue(status, get_snippet(page_text, issue_id))
except UpstreamUnreachableException:
pass
self.last_fetch_time = time.time()
def maybe_refresh(self):
if time.time() - self.last_fetch_time >= 3600:
self.refresh()
def get_issue(self, issue_id):
issue = self.issues.get(issue_id)
if issue is None:
raise NoSuchIssueException(issue_id)
return issue
def get_issues_and_statuses(self):
result = list(self.issues.keys())
result.sort(key=int)
return [(issue_id, self.issues[issue_id].status) for issue_id in result]
def to_human_readable(self, page_size):
if page_size < 10000:
return '%d bytes' % page_size
elif page_size < 10000000:
return '%d KB' % (page_size / 1000)
elif page_size < 10000000000:
return '%d MB' % (page_size / 1000000)
else:
return '%d GB' % (page_size / 1000000000)
def get_urls_and_sizes(self):
return [
(url, self.to_human_readable(size))
for url, size in self.url_sizes.items()
]
gPageCache = PageCache([
'http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_active.html',
'http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_closed.html',
'http://www.open-std.org/jtc1/sc22/wg21/docs/cwg_defects.html',
])
@app.get('/robots.txt')
def robots_txt():
bottle.response.content_type = 'text/plain'
return 'User-agent: *\nDisallow: /\n'
@app.get('/')
@app.get('/index.html')
def home():
gPageCache.maybe_refresh()
return bottle.template('index.tpl', {
'urls_and_sizes': gPageCache.get_urls_and_sizes(),
'issues_and_statuses': gPageCache.get_issues_and_statuses(),
})
@app.get('/<cwgn:re:cwg[0-9]+>')
def issue_page(cwgn):
    issue_id = cwgn[3:]
    try:
        gPageCache.maybe_refresh()
issue = gPageCache.get_issue(issue_id)
return bottle.template('issue.tpl', {
'issue_id': issue_id,
'issue_text': issue.text,
'status': issue.status,
})
except NoSuchIssueException as ex:
return bottle.template('nosuchissue.tpl', {
'issue_number': ex.issue_number,
})
except UpstreamNotParseableException as ex:
return bottle.template('upstreamnotparseable.tpl', {
'status_code': ex.status_code,
'text': ex.text[:1000],
'url': ex.url,
})
except UpstreamUnreachableException as ex:
return bottle.template('upstreamunreachable.tpl', {
'status_code': ex.status_code,
'text': ex.text[:1000],
'url': ex.url,
})
if __name__ == '__main__':
port = int(os.environ.get('PORT', 8080))
app.run(host='0.0.0.0', port=port)
| 28.697674
| 98
| 0.608185
|
e32e910cac0d3c44e88e8fad2d0ff65892a6a12f
| 3,541
|
py
|
Python
|
chapter11-语音识别(LSTM-CTC)/data.py
|
1364354238/PYTORCH_LEARNING
|
d7ab877512ab41c80b37ab68bd1a42193916f31c
|
[
"MIT"
] | 137
|
2018-11-13T06:35:49.000Z
|
2022-03-07T09:21:31.000Z
|
chapter11-语音识别(LSTM-CTC)/data.py
|
WQAQs/PYTORCH_LEARNING
|
d7ab877512ab41c80b37ab68bd1a42193916f31c
|
[
"MIT"
] | null | null | null |
chapter11-语音识别(LSTM-CTC)/data.py
|
WQAQs/PYTORCH_LEARNING
|
d7ab877512ab41c80b37ab68bd1a42193916f31c
|
[
"MIT"
] | 54
|
2018-11-13T09:38:37.000Z
|
2022-03-25T03:46:25.000Z
|
#encoding=utf-8
#This file subclasses Dataset and DataLoader to handle audio and label files
#and convert them into a format the network can consume
import os
import torch
import scipy.signal
from torch.utils.data import Dataset
from torch.utils.data import DataLoader
from utils import parse_audio, process_label_file
windows = {'hamming':scipy.signal.hamming, 'hann':scipy.signal.hann, 'blackman':scipy.signal.blackman,
'bartlett':scipy.signal.bartlett}
audio_conf = {"sample_rate":16000, 'window_size':0.025, 'window_stride':0.01, 'window': 'hamming'}
int2char = ["_", "'", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p",
"q", "r", "s", "t", "u", "v", "w", "x", "y", "z", " "]
class SpeechDataset(Dataset):
def __init__(self, data_dir, data_set='train', normalize=True):
self.data_set = data_set
self.normalize = normalize
self.char2int = {}
self.n_feats = int(audio_conf['sample_rate']*audio_conf['window_size']/2+1)
for i in range(len(int2char)):
self.char2int[int2char[i]] = i
wav_path = os.path.join(data_dir, data_set+'_wav.scp')
label_file = os.path.join(data_dir, data_set+'.text')
self.process_audio(wav_path, label_file)
def process_audio(self, wav_path, label_file):
#read the label file
self.label = process_label_file(label_file, self.char2int)
#read the path file
self.path = []
with open(wav_path, 'r') as f:
for line in f.readlines():
utt, path = line.strip().split()
self.path.append(path)
        #ensure input and label have the same number of samples
assert len(self.label) == len(self.path)
def __getitem__(self, idx):
return parse_audio(self.path[idx], audio_conf, windows, normalize=self.normalize), self.label[idx]
def __len__(self):
return len(self.path)
def collate_fn(batch):
    #convert inputs and labels into a batch the network can consume
#batch : batch_size * (seq_len * nfeats, target_length)
def func(p):
return p[0].size(0)
#sort batch according to the frame nums
batch = sorted(batch, reverse=True, key=func)
longest_sample = batch[0][0]
feat_size = longest_sample.size(1)
max_length = longest_sample.size(0)
batch_size = len(batch)
    inputs = torch.zeros(batch_size, max_length, feat_size) #network input; zero-pads sequences of unequal length
    input_sizes = torch.IntTensor(batch_size) #sequence length (frame count) of each input sample
    target_sizes = torch.IntTensor(batch_size) #label length of each utterance
targets = []
input_size_list = []
for x in range(batch_size):
sample = batch[x]
feature = sample[0]
label = sample[1]
seq_length = feature.size(0)
inputs[x].narrow(0, 0, seq_length).copy_(feature)
input_sizes[x] = seq_length
input_size_list.append(seq_length)
target_sizes[x] = len(label)
targets.extend(label)
targets = torch.IntTensor(targets)
return inputs, targets, input_sizes, input_size_list, target_sizes
'''
class torch.utils.data.DataLoader(dataset, batch_size=1, shuffle=False,
sampler=None, batch_sampler=None, num_workers=0,
collate_fn=<function default_collate>,
pin_memory=False, drop_last=False)
'''
class SpeechDataLoader(DataLoader):
def __init__(self, *args, **kwargs):
super(SpeechDataLoader, self).__init__(*args, **kwargs)
self.collate_fn = collate_fn
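# Example usage (a minimal sketch; assumes data_dir contains the expected
# train_wav.scp and train.text files):
#   dataset = SpeechDataset('data_dir', data_set='train')
#   loader = SpeechDataLoader(dataset, batch_size=8, shuffle=True)
#   for inputs, targets, input_sizes, input_size_list, target_sizes in loader:
#       pass  # feed the padded batch to an LSTM-CTC model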
| 36.885417
| 106
| 0.614516
|
419e7b9bdef5c7567024c51b6f7036ffd6a822b9
| 318
|
py
|
Python
|
s2cnn/__init__.py
|
aycatakmaz/s2cnn
|
b75efee458686e7d7ecb4c337402c668ede0dece
|
[
"MIT"
] | 853
|
2017-12-03T19:03:02.000Z
|
2021-07-23T13:41:30.000Z
|
s2cnn/__init__.py
|
aycatakmaz/s2cnn
|
b75efee458686e7d7ecb4c337402c668ede0dece
|
[
"MIT"
] | 52
|
2018-04-13T15:33:47.000Z
|
2021-07-28T19:41:56.000Z
|
s2cnn/__init__.py
|
aycatakmaz/s2cnn
|
b75efee458686e7d7ecb4c337402c668ede0dece
|
[
"MIT"
] | 169
|
2017-12-15T17:47:24.000Z
|
2021-07-10T09:18:15.000Z
|
# pylint: disable=R,C,E1101,W0401
from .s2_ft import s2_rft
from .so3_ft import so3_rft
from .s2_grid import s2_near_identity_grid, s2_equatorial_grid, s2_soft_grid
from .so3_grid import so3_near_identity_grid, so3_equatorial_grid, so3_soft_grid
from .s2_mm import s2_mm
from .so3_mm import so3_mm
from .soft import *
| 35.333333
| 80
| 0.833333
|
3e0a4855905b15b4fcf3de92458339e12e8741b7
| 805
|
py
|
Python
|
users/migrations/0001_initial.py
|
royaleagle-dev/flinch
|
789262cc207b3fa10ab6882ef8aa72ab368bee85
|
[
"Apache-2.0"
] | null | null | null |
users/migrations/0001_initial.py
|
royaleagle-dev/flinch
|
789262cc207b3fa10ab6882ef8aa72ab368bee85
|
[
"Apache-2.0"
] | null | null | null |
users/migrations/0001_initial.py
|
royaleagle-dev/flinch
|
789262cc207b3fa10ab6882ef8aa72ab368bee85
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.2.7 on 2020-04-19 01:33
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('phone', models.CharField(max_length=20)),
('totalCart', models.IntegerField(default=0)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| 29.814815
| 121
| 0.63354
|
a83f1af2e7e8c7126eb0bf469f4e4d20dfa91f10
| 4,279
|
py
|
Python
|
benchmark/startQiskit_QC2681.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit_QC2681.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit_QC2681.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
# qubit number=4
# total number=35
import cirq
import qiskit
from qiskit import IBMQ
from qiskit.providers.ibmq import least_busy
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
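    # Example (illustrative): for n = 2 and f = lambda rep: rep[0], the loop
    # below flips the target qubit exactly on basis states where f(rep) == "1".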
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.x(input_qubit[3]) # number=1
prog.h(input_qubit[0]) # number=18
prog.x(input_qubit[1]) # number=28
prog.cz(input_qubit[3],input_qubit[0]) # number=19
prog.h(input_qubit[2]) # number=24
prog.h(input_qubit[0]) # number=20
prog.rx(-1.8378317023500288,input_qubit[1]) # number=25
prog.z(input_qubit[3]) # number=14
prog.cx(input_qubit[3],input_qubit[0]) # number=15
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[3]) # number=16
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
prog.h(input_qubit[0]) # number=5
oracle = build_oracle(n-1, f)
prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
prog.h(input_qubit[1]) # number=6
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[0]) # number=9
prog.h(input_qubit[3]) # number=29
prog.cz(input_qubit[0],input_qubit[3]) # number=30
prog.h(input_qubit[3]) # number=31
prog.cx(input_qubit[0],input_qubit[3]) # number=32
prog.x(input_qubit[3]) # number=33
prog.cx(input_qubit[0],input_qubit[3]) # number=34
prog.cx(input_qubit[0],input_qubit[3]) # number=23
prog.z(input_qubit[1]) # number=26
prog.x(input_qubit[2]) # number=11
prog.x(input_qubit[2]) # number=12
prog.z(input_qubit[1]) # number=27
# circuit end
for i in range(n):
prog.measure(input_qubit[i], classical[i])
return prog
if __name__ == '__main__':
a = "111"
b = "0"
f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
prog = make_circuit(4,f)
IBMQ.load_account()
provider = IBMQ.get_provider(hub='ibm-q')
provider.backends()
backend = least_busy(provider.backends(filters=lambda x: x.configuration().n_qubits >= 2 and not x.configuration().simulator and x.status().operational == True))
    sample_shot = 8000
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_QC2681.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.__len__(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| 35.07377
| 165
| 0.655293
|
53fe057302123598794eaa2a073e92211f6d7739
| 4,369
|
py
|
Python
|
tests/benchmarks.py
|
smartfile/wsgidav
|
17df81a4b8b92c153d7a50a463a60bbe0868545e
|
[
"MIT"
] | 1
|
2015-04-28T19:16:01.000Z
|
2015-04-28T19:16:01.000Z
|
tests/benchmarks.py
|
smartfile/wsgidav
|
17df81a4b8b92c153d7a50a463a60bbe0868545e
|
[
"MIT"
] | null | null | null |
tests/benchmarks.py
|
smartfile/wsgidav
|
17df81a4b8b92c153d7a50a463a60bbe0868545e
|
[
"MIT"
] | null | null | null |
# -*- coding: iso-8859-1 -*-
# (c) 2009-2014 Martin Wendt and contributors; see WsgiDAV https://github.com/mar10/wsgidav
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
"""
Benchmark suite for WsgiDAV.
This test suite uses davclient to generate WebDAV requests.
A first collection of ideas
===========================
- The result is printable HTML, copy/pastable
- It also contains date, environment info (Hardware, package versions, ...)
- The suite can be run stand-alone against a running WsgiDAV server, just like
litmus.
- It uses `davclient` and generates an HTML file.
- There should be detailed results as well as a few summarizing numbers:
('Total time', 'Byte reads per second', 'Byte write per second', or something
like this), so one can compare benchmarks at a glance.
- Optional parameters allow to run only a single test
- Parameter allows to pass configuration infos that are dumped with the result:
benchEnviron = {
"comment": "Test with caching enabled",
"server_os": "Ubuntu 9.01",
"server_cpu": "Intel 3GHz",
"server_ram": "2GB",
"wsgidav_version": "0.4.b1"
"network_bandwidth": "100MBit",
>> these can be automatically set?:
"client_os": "Windows XP",
"client_cpu": "AMD 5000",
"date": now()
}
- Allow to print profiling info (from WsgiDAV server and from benchmark client!)
- The result file could also contain the results of test suites ('PASSED'),
so we could use it as documentation for tests on different platforms/setups.
Questions
=========
- is lxml really faster?
- compare this to mod_dav's performance
Test cases
==========
- PUT 1 x 10 MB
- PUT 100 x 1 kB
- GET 1 x 10 MB
- GET 100 x 1 kB
- 100 x PROPFIND depth 0
- 1 x PROPFIND depth infinity
- COPY: big file, many small files, big tree
- MOVE: big file, many small files, big tree
- DELETE: big file, many small files, big tree
- LOCK
- UNLOCK
- Check if locked
- PROPPATCH
- PROPFIND: depth 0, many small files
depth infinity
- run litmus in a timed script
- Simulate typical Windows Client request sequences:
- dir browsing
- file reading
- file editing
- http://groups.google.com/group/paste-users/t/b2afc88a86caade1?hl=en
use httperf
http://www.hpl.hp.com/research/linux/httperf/httperf-man-0.9.txt
and openwebload
http://openwebload.sourceforge.net/index.html
- makeTree(rootFolderName="/bench", folderCount=10, subfolderCount=10, fileCount=10, fileSize=1024)
Big tree with 100 folders and 1000 files
bench/
folder1/
..
folder10/
subfolder10-1/
..
subfolder10-10/
file10-10-1.txt -> 1k
"""
import logging
_benchmarks = [#"proppatch_many",
#"proppatch_big",
#"proppatch_deep",
"test_scripted",
]
def _real_run_bench(bench, opts):
if bench == "*":
for bench in _benchmarks:
run_bench(bench, opts)
return
assert bench in _benchmarks
if bench == "test_scripted":
from tests import test_scripted
test_scripted.main()
else:
raise ValueError()
def run_bench(bench, opts):
profile_benchmarks = opts["profile_benchmarks"]
if bench in profile_benchmarks:
# http://docs.python.org/library/profile.html#module-cProfile
        import cProfile
        import pstats
        try:
            from cStringIO import StringIO  # Python 2
        except ImportError:
            from io import StringIO  # Python 3
        prof = cProfile.Profile()
        prof = prof.runctx("_real_run_bench(bench, opts)", globals(), locals())
        stream = StringIO()
stats = pstats.Stats(prof, stream=stream)
# stats.sort_stats("time") # Or cumulative
stats.sort_stats("cumulative") # Or time
stats.print_stats(80) # 80 = how many to print
# The rest is optional.
# stats.print_callees()
# stats.print_callers()
logging.warning("Profile data for '%s':\n%s" % (bench, stream.getvalue()))
else:
_real_run_bench(bench, opts)
def bench_all(opts):
run_bench("*", opts)
def main():
opts = {"num": 10,
"profile_benchmarks": ["*"],
}
bench_all(opts)
if __name__ == "__main__":
main()
| 30.985816
| 99
| 0.624857
|
41e3619c3a2930f22602f9e068f254f6ba0b63e0
| 1,910
|
py
|
Python
|
position.py
|
sattila83/Robocar
|
3454b4ed15d985b0bfa1a9388c20546081124ae9
|
[
"MIT"
] | 1
|
2017-07-06T09:22:24.000Z
|
2017-07-06T09:22:24.000Z
|
position.py
|
sattila83/Robocar
|
3454b4ed15d985b0bfa1a9388c20546081124ae9
|
[
"MIT"
] | null | null | null |
position.py
|
sattila83/Robocar
|
3454b4ed15d985b0bfa1a9388c20546081124ae9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import math
EARTH_RADIUS = 6371e3
class Position:
def __init__(self, lat = 0.0, lon = 0.0):
self.lat = math.radians(float(lat))
self.lon = math.radians(float(lon))
def __cmp__(self, other):
if 0.0 == self.distanceTo(other):
return 0
elif self.bearingTo(other) < other.bearingTo(self):
return -1
else:
return 1
def __repr__(self):
return "(%(lat)f,%(lon)f)" % { 'lat': self.lat, 'lon': self.lon }
def __str__(self):
return "(%(lat)f,%(lon)f)" % { 'lat': math.degrees(self.lat), 'lon': math.degrees(self.lon) }
@staticmethod
def toDecimal(value, direction):
parts = value.split('.')
decimal = float("0." + parts[1]) / 60.0 # convert seconds
decimal = decimal + (float(parts[0][-2:]) / 60.0) # convert minutes
decimal = decimal + float(parts[0][:-2]) # convert hours
if 'N' == direction or 'E' == direction:
return decimal
else:
return 0.0 - decimal
def distanceTo(self, otherPosition):
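        # Great-circle distance via the haversine formula; result is in metres.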
deltaLat = otherPosition.lat - self.lat
deltaLon = otherPosition.lon - self.lon
squareOfHalfChordLength = math.sin(deltaLat / 2) * math.sin(deltaLat / 2) + math.cos(self.lat) * math.cos(otherPosition.lat) * math.sin(deltaLon / 2) * math.sin(deltaLon / 2)
return EARTH_RADIUS * 2 * math.atan2(math.sqrt(squareOfHalfChordLength), math.sqrt(1 - squareOfHalfChordLength))
def bearingTo(self, otherPosition):
deltaLon = otherPosition.lon - self.lon
        bearingInRadians = math.atan2(math.sin(deltaLon) * math.cos(otherPosition.lat), math.cos(self.lat) * math.sin(otherPosition.lat) - math.sin(self.lat) * math.cos(otherPosition.lat) * math.cos(deltaLon))
        return (math.degrees(bearingInRadians) + 360.0) % 360.0
def isNull(self):
return self.lat == 0.0 and self.lon == 0.0
@staticmethod
def positionListToStr(positions):
s = "["
for p in positions:
s = s + str(p) + ", "
if len(s) > 1:
s = s[:-2]
s = s + "]"
return s
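# Example (a sketch; coordinates are illustrative):
#   budapest = Position(47.4979, 19.0402)
#   vienna = Position(48.2082, 16.3738)
#   budapest.distanceTo(vienna)  # roughly 214 km, returned in metres
#   budapest.bearingTo(vienna)   # initial bearing in degrees, 0..360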
| 31.833333
| 203
| 0.663351
|
106bdbf8ef5d0f5b7c18d17104e19a900bd3184a
| 329
|
py
|
Python
|
homeassistant/util/yaml/__init__.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 23
|
2017-11-15T21:03:53.000Z
|
2021-03-29T21:33:48.000Z
|
homeassistant/util/yaml/__init__.py
|
domwillcode/home-assistant
|
f170c80bea70c939c098b5c88320a1c789858958
|
[
"Apache-2.0"
] | 52
|
2020-07-14T14:12:26.000Z
|
2022-03-31T06:24:02.000Z
|
homeassistant/util/yaml/__init__.py
|
klauern/home-assistant-core
|
c18ba6aec0627e6afb6442c678edb5ff2bb17db6
|
[
"Apache-2.0"
] | 10
|
2018-01-01T00:12:51.000Z
|
2021-12-21T23:08:05.000Z
|
"""YAML utility functions."""
from .const import _SECRET_NAMESPACE, SECRET_YAML
from .dumper import dump, save_yaml
from .loader import clear_secret_cache, load_yaml, secret_yaml
__all__ = [
"SECRET_YAML",
"_SECRET_NAMESPACE",
"dump",
"save_yaml",
"clear_secret_cache",
"load_yaml",
"secret_yaml",
]
| 21.933333
| 62
| 0.705167
|
325faf3a45c21df26e15c4bba8ca0c39031fa5b8
| 795
|
py
|
Python
|
var/spack/repos/builtin/packages/py-magic/package.py
|
jeanbez/spack
|
f4e51ce8f366c85bf5aa0eafe078677b42dae1ba
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null |
var/spack/repos/builtin/packages/py-magic/package.py
|
jeanbez/spack
|
f4e51ce8f366c85bf5aa0eafe078677b42dae1ba
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 8
|
2021-11-09T20:28:40.000Z
|
2022-03-15T03:26:33.000Z
|
var/spack/repos/builtin/packages/py-magic/package.py
|
jeanbez/spack
|
f4e51ce8f366c85bf5aa0eafe078677b42dae1ba
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2
|
2019-02-08T20:37:20.000Z
|
2019-03-31T15:19:26.000Z
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class PyMagic(PythonPackage):
"""A python wrapper for libmagic.
.. warning::
DO NOT USE: this is a duplicate of py-python-magic and will be deleted.
"""
homepage = "https://github.com/ahupp/python-magic"
url = "https://github.com/ahupp/python-magic/archive/0.4.15.tar.gz"
version('0.4.15', sha256='6d730389249ab1e34ffb0a3c5beaa44e116687ffa081e0176dab6c59ff271593', deprecated=True)
depends_on('python@2.7.0:2.7,3.5:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('file', type='run')
| 33.125
| 113
| 0.700629
|
f0713ed5cd83ed0691f7e5b3f07899851b0aafc3
| 3,256
|
py
|
Python
|
jaxdl/rl/networks/actor_nets.py
|
patrickhart/jaxdl
|
032df55292410c2976703213e67fff7bcafaedbe
|
[
"MIT"
] | 1
|
2022-02-09T09:19:40.000Z
|
2022-02-09T09:19:40.000Z
|
jaxdl/rl/networks/actor_nets.py
|
patrickhart/jaxdl
|
032df55292410c2976703213e67fff7bcafaedbe
|
[
"MIT"
] | null | null | null |
jaxdl/rl/networks/actor_nets.py
|
patrickhart/jaxdl
|
032df55292410c2976703213e67fff7bcafaedbe
|
[
"MIT"
] | null | null | null |
"""Actor network implementations"""
import functools
from typing import Callable, Optional, Sequence, Tuple
import numpy as np
import flax.linen as nn
import jax
import jax.numpy as jnp
from tensorflow_probability.substrates import jax as tfp
tfb = tfp.bijectors
tfd = tfp.distributions
from jaxdl.utils.commons import PRNGKey, Module, TrainState
from jaxdl.nn.dnn.mlp import default_init, forward_mlp_fn
def create_normal_dist_policy_fn(
hidden_dims : Sequence[int] = [256, 256],
forward_fn: Callable = forward_mlp_fn) -> Callable:
"""Return a normal distribution actor policy
Args:
hidden_dims (Sequence[int], optional): Hidden dimension of network.
Defaults to [256, 256].
action_dim (int, optional): Action dimensions of environment.
Defaults to 2.
Returns:
Module: Returns a NormalDistPolicy
"""
def network_fn(action_dim: int):
return NormalDistPolicy(hidden_dims=hidden_dims,
action_dim=action_dim, forward_fn=forward_fn)
return network_fn
class NormalDistPolicy(nn.Module):
"""Normal distribution actor policy."""
hidden_dims: Sequence[int]
action_dim: int
log_std_scale: float = 1.0
log_std_min: float = -10.0
log_std_max: float = 2.0
tanh_squash_distribution: bool = True
forward_fn: Callable = forward_mlp_fn
dropout_rate: Optional[float] = None
@nn.compact
def __call__(self,
observations: jnp.ndarray,
temperature: float = 1.0,
training: bool = False) -> tfd.Distribution:
"""Calls the network
Args:
observations (jnp.ndarray): Observation from environment.
temperature (float, optional): Temperature parameter. Defaults to 1.0.
training (bool, optional): Mode. Defaults to False.
Returns:
tfd.Distribution: Tensorflow probability distribution
"""
# call networks
out = self.forward_fn(
hidden_dims=self.hidden_dims, dropout_rate=self.dropout_rate,
activate_final=True)(observations, training=training)
# means
means = nn.Dense(self.action_dim, kernel_init=default_init())(out)
# log standard deviations
log_stds = nn.Dense(self.action_dim,
kernel_init=default_init(self.log_std_scale))(out)
# clip log standard deviations
log_stds = jnp.clip(log_stds, self.log_std_min, self.log_std_max)
    # create distribution
dist = tfd.MultivariateNormalDiag(
loc=means, scale_diag=jnp.exp(log_stds) * temperature)
# return distribution
if self.tanh_squash_distribution:
# will produce actions in [-1, 1]
return tfd.TransformedDistribution(distribution=dist, bijector=tfb.Tanh())
return dist
@functools.partial(jax.jit)
def sample_actions(
rng: PRNGKey,
actor_net: TrainState,
observations: np.ndarray,
temperature: float = 1.0) -> Tuple[PRNGKey, jnp.ndarray]:
"""Samples actions from a given policy
Args:
rng (PRNGKey): RNG
actor_net (TrainState): Actor network
observations (np.ndarray): Environment observation
temperature (float, optional): Temperature parameter. Defaults to 1.0.
Returns:
Tuple[PRNGKey, jnp.ndarray]: RNG and actions
"""
dist = actor_net.apply_fn(actor_net.params, observations, temperature)
rng, key = jax.random.split(rng)
return rng, dist.sample(seed=key)
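# Example (a sketch; `actor_net`, `observations` and `env` are assumed to exist):
#   network_fn = create_normal_dist_policy_fn(hidden_dims=[256, 256])
#   policy = network_fn(action_dim=env.action_space.shape[0])
#   rng, actions = sample_actions(rng, actor_net, observations)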
| 29.6
| 80
| 0.727273
|
5e293fbf998bba3828307fb747310dbcc443e679
| 9
|
py
|
Python
|
packages/jsii-rosetta/test/translations/expressions/prefix_unary_expression.py
|
NGL321/jsii
|
a31ebf5ef676391d97f2286edc21e5859c38c96c
|
[
"Apache-2.0"
] | 1
|
2019-12-12T21:51:17.000Z
|
2019-12-12T21:51:17.000Z
|
packages/jsii-rosetta/test/translations/expressions/prefix_unary_expression.py
|
NGL321/jsii
|
a31ebf5ef676391d97f2286edc21e5859c38c96c
|
[
"Apache-2.0"
] | 424
|
2020-10-08T16:37:02.000Z
|
2022-03-30T22:04:41.000Z
|
packages/jsii-rosetta/test/translations/expressions/prefix_unary_expression.py
|
NGL321/jsii
|
a31ebf5ef676391d97f2286edc21e5859c38c96c
|
[
"Apache-2.0"
] | 1
|
2019-10-07T05:51:48.000Z
|
2019-10-07T05:51:48.000Z
|
print(-3)
| 9
| 9
| 0.666667
|
e7f9c58acd6f1ff48f4955a8c244b5dae489e040
| 2,953
|
py
|
Python
|
scripts/necklace/gui/simple_ui.py
|
r4inm4ker/neklace
|
93d7ee3d3b9017144fcda16a34959933b4a48a06
|
[
"Apache-2.0"
] | null | null | null |
scripts/necklace/gui/simple_ui.py
|
r4inm4ker/neklace
|
93d7ee3d3b9017144fcda16a34959933b4a48a06
|
[
"Apache-2.0"
] | null | null | null |
scripts/necklace/gui/simple_ui.py
|
r4inm4ker/neklace
|
93d7ee3d3b9017144fcda16a34959933b4a48a06
|
[
"Apache-2.0"
] | 1
|
2017-12-08T15:21:31.000Z
|
2017-12-08T15:21:31.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Jefri Haryono
# @Email : jefri.yeh@gmail.com
import pymel.core as pm
import necklace.lib as nlib
class NecklaceSimpleUI():
uiName = 'necklaceSimpleUI'
uiTitle = 'Necklace Simple UI'
necklaceName = 'testNecklaceNode'
tmpGrp = 'necklaceDupGrp'
def ui(self):
if pm.window(self.uiName, q=True, ex=True):
pm.deleteUI(self.uiName)
win = pm.window(self.uiName, title=self.uiTitle)
with win:
mainForm = pm.formLayout()
with mainForm:
hori = pm.horizontalLayout()
with hori:
pm.text(w=80, label='curve: ')
self.curveTF = pm.textField(w=80)
pm.button('<<<', w=40, command=pm.Callback(self.updateCurveTF))
hori.redistribute(0, 0, 0)
hori = pm.horizontalLayout()
with hori:
pm.text(w=80, label='geo: ')
self.geoTF = pm.textField(w=80)
pm.button('<<<', w=40, command=pm.Callback(self.updateGeoTF))
hori.redistribute(0, 0, 0)
hori = pm.horizontalLayout()
with hori:
pm.text(w=80, label='num sample: ')
self.numSampleIF = pm.intField(w=80, value=1)
hori.redistribute(0, 0)
pm.separator(h=10)
pm.button('create', command=pm.Callback(self.create))
mainForm.redistribute(*[0] * mainForm.getNumberOfChildren())
win.show()
@classmethod
def launch(cls):
        cls().ui()
def updateCurveTF(self):
sel = pm.ls(sl=1)
if sel:
curve = sel[0]
self.curveTF.setText(curve)
def updateGeoTF(self):
sel = pm.ls(sl=1)
if sel:
geo = sel[0]
self.geoTF.setText(geo)
def create(self):
curve = self.curveTF.getText()
geo = self.geoTF.getText()
if curve and geo:
curveNode = pm.PyNode(curve)
geoNode = pm.PyNode(geo)
if pm.objExists(self.necklaceName):
pm.delete(self.necklaceName)
if pm.objExists(self.tmpGrp):
pm.delete(self.tmpGrp)
necklaceNode = nlib.attachNecklace(curveNode)
pm.rename(necklaceNode, self.necklaceName)
numSample = self.numSampleIF.getValue()
necklaceNode.numSample.set(numSample)
tmpGrp = pm.createNode('transform', name=self.tmpGrp)
for idx in range(numSample):
geo = pm.duplicate(geoNode)[0]
pm.connectAttr(necklaceNode.outPosition[idx].outPosition, geo.translate)
pm.connectAttr(necklaceNode.outRotation[idx].outRotation, geo.rotate)
pm.parent(geo, tmpGrp)
def launch():
NecklaceSimpleUI.launch()
| 29.828283
| 88
| 0.540129
|
daf48954d7d70e4e13a788c69736a83c31cf30c3
| 17,921
|
py
|
Python
|
scripts/transforms.py
|
MichelML/ml-aging
|
b54470c00450da7d5b50e7be4a1f162f1c4b8531
|
[
"Apache-2.0"
] | 7
|
2019-07-08T06:24:53.000Z
|
2022-03-22T13:41:00.000Z
|
scripts/transforms.py
|
MichelML/ml-aging
|
b54470c00450da7d5b50e7be4a1f162f1c4b8531
|
[
"Apache-2.0"
] | null | null | null |
scripts/transforms.py
|
MichelML/ml-aging
|
b54470c00450da7d5b50e7be4a1f162f1c4b8531
|
[
"Apache-2.0"
] | 2
|
2019-08-19T13:43:49.000Z
|
2019-08-25T02:01:48.000Z
|
"""
This module includes modified versions of some Pytorch transforms functions to be able to
modify a sequence of images with the exact same transformations.
See https://pytorch.org/docs/stable/_modules/torchvision/transforms/transforms.html to see
The original methods.
"""
from __future__ import division
import torch
from torchvision.transforms import transforms
from torchvision.transforms.transforms import Lambda, Compose
from torchvision.transforms import functional as F
import math
import sys
import random
from PIL import Image
try:
import accimage
except ImportError:
accimage = None
import numpy as np
import numbers
import types
import collections
import warnings
if sys.version_info < (3, 3):
Sequence = collections.Sequence
Iterable = collections.Iterable
else:
Sequence = collections.abc.Sequence
Iterable = collections.abc.Iterable
# Mapping used by RandomResizedCrop.__repr__ below; defined locally so that
# repr() does not NameError (mirrors torchvision's private helper).
_pil_interpolation_to_str = {
    Image.NEAREST: 'PIL.Image.NEAREST',
    Image.BILINEAR: 'PIL.Image.BILINEAR',
    Image.BICUBIC: 'PIL.Image.BICUBIC',
    Image.LANCZOS: 'PIL.Image.LANCZOS',
}
SAMPLE_IMG = Image.open('./images/example_input.png')
class RandomHorizontalFlip(object):
"""Horizontally flip the given PIL Image randomly with a given probability.
Args:
p (float): probability of the image being flipped. Default value is 0.5
"""
def __init__(self, randprob=random.random(), p=0.5):
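        # Note: the default randprob is evaluated once, at import time; callers
        # (see the gen_transform_* helpers below) pass a fresh randprob per sequence.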
self.randprob = randprob
self.p = p
def __call__(self, img):
"""
Args:
img (PIL Image): Image to be flipped.
Returns:
PIL Image: Randomly flipped image.
"""
if self.randprob < self.p:
return F.hflip(img)
return img
def __repr__(self):
return self.__class__.__name__ + '(p={})'.format(self.p)
class RandomVerticalFlip(object):
"""Vertically flip the given PIL Image randomly with a given probability.
Args:
p (float): probability of the image being flipped. Default value is 0.5
"""
def __init__(self, randprob=random.random(), p=0.5):
self.randprob = randprob
self.p = p
def __call__(self, img):
"""
Args:
img (PIL Image): Image to be flipped.
Returns:
PIL Image: Randomly flipped image.
"""
if self.randprob < self.p:
return F.vflip(img)
return img
def __repr__(self):
return self.__class__.__name__ + '(p={})'.format(self.p)
class ColorJitter(object):
"""Randomly change the brightness, contrast and saturation of an image.
Args:
brightness (float or tuple of float (min, max)): How much to jitter brightness.
brightness_factor is chosen uniformly from [max(0, 1 - brightness), 1 + brightness]
or the given [min, max]. Should be non negative numbers.
contrast (float or tuple of float (min, max)): How much to jitter contrast.
contrast_factor is chosen uniformly from [max(0, 1 - contrast), 1 + contrast]
or the given [min, max]. Should be non negative numbers.
saturation (float or tuple of float (min, max)): How much to jitter saturation.
saturation_factor is chosen uniformly from [max(0, 1 - saturation), 1 + saturation]
or the given [min, max]. Should be non negative numbers.
hue (float or tuple of float (min, max)): How much to jitter hue.
hue_factor is chosen uniformly from [-hue, hue] or the given [min, max].
Should have 0<= hue <= 0.5 or -0.5 <= min <= max <= 0.5.
"""
def __init__(self, brightness=0, contrast=0, saturation=0, hue=0):
self.brightness = self._check_input(brightness, 'brightness')
self.contrast = self._check_input(contrast, 'contrast')
self.saturation = self._check_input(saturation, 'saturation')
self.hue = self._check_input(hue, 'hue', center=0, bound=(-0.5, 0.5),
clip_first_on_zero=False)
self.transforms = self.get_params(self.brightness, self.contrast, self.saturation, self.hue)
def _check_input(self, value, name, center=1, bound=(0, float('inf')), clip_first_on_zero=True):
if isinstance(value, numbers.Number):
if value < 0:
raise ValueError("If {} is a single number, it must be non negative.".format(name))
value = [center - value, center + value]
if clip_first_on_zero:
value[0] = max(value[0], 0)
elif isinstance(value, (tuple, list)) and len(value) == 2:
if not bound[0] <= value[0] <= value[1] <= bound[1]:
raise ValueError("{} values should be between {}".format(name, bound))
else:
raise TypeError("{} should be a single number or a list/tuple with lenght 2.".format(name))
# if value is 0 or (1., 1.) for brightness/contrast/saturation
# or (0., 0.) for hue, do nothing
if value[0] == value[1] == center:
value = None
return value
@staticmethod
def get_params(brightness, contrast, saturation, hue):
"""Get a randomized transform to be applied on image.
Arguments are same as that of __init__.
Returns:
Transform which randomly adjusts brightness, contrast and
saturation in a random order.
"""
transforms = []
if brightness is not None:
brightness_factor = random.uniform(brightness[0], brightness[1])
transforms.append(Lambda(lambda img: F.adjust_brightness(img, brightness_factor)))
if contrast is not None:
contrast_factor = random.uniform(contrast[0], contrast[1])
transforms.append(Lambda(lambda img: F.adjust_contrast(img, contrast_factor)))
if saturation is not None:
saturation_factor = random.uniform(saturation[0], saturation[1])
transforms.append(Lambda(lambda img: F.adjust_saturation(img, saturation_factor)))
if hue is not None:
hue_factor = random.uniform(hue[0], hue[1])
transforms.append(Lambda(lambda img: F.adjust_hue(img, hue_factor)))
random.shuffle(transforms)
transform = Compose(transforms)
return transform
def __call__(self, img):
"""
Args:
img (PIL Image): Input image.
Returns:
PIL Image: Color jittered image.
"""
return self.transforms(img)
def __repr__(self):
format_string = self.__class__.__name__ + '('
format_string += 'brightness={0}'.format(self.brightness)
format_string += ', contrast={0}'.format(self.contrast)
format_string += ', saturation={0}'.format(self.saturation)
format_string += ', hue={0})'.format(self.hue)
return format_string
class RandomResizedCrop(object):
"""Crop the given PIL Image to random size and aspect ratio.
    A crop of random size (default here: 0.85 to 0.90) of the original size and a random
aspect ratio (default: of 3/4 to 4/3) of the original aspect ratio is made. This crop
is finally resized to given size.
This is popularly used to train the Inception networks.
Args:
size: expected output size of each edge
scale: range of size of the origin size cropped
ratio: range of aspect ratio of the origin aspect ratio cropped
interpolation: Default: PIL.Image.BILINEAR
"""
def __init__(self, size, scale=(0.85, .90), ratio=(3. / 4., 4. / 3.), interpolation=Image.BILINEAR):
if isinstance(size, tuple):
self.size = size
else:
self.size = (size, size)
if (scale[0] > scale[1]) or (ratio[0] > ratio[1]):
warnings.warn("range should be of kind (min, max)")
self.interpolation = interpolation
self.scale = scale
self.ratio = ratio
self.params = self.get_params(SAMPLE_IMG, self.scale, self.ratio)
@staticmethod
def get_params(img, scale, ratio):
"""Get parameters for ``crop`` for a random sized crop.
Args:
img (PIL Image): Image to be cropped.
scale (tuple): range of size of the origin size cropped
ratio (tuple): range of aspect ratio of the origin aspect ratio cropped
Returns:
tuple: params (i, j, h, w) to be passed to ``crop`` for a random
sized crop.
"""
area = img.size[0] * img.size[1]
for attempt in range(10):
target_area = random.uniform(*scale) * area
log_ratio = (math.log(ratio[0]), math.log(ratio[1]))
aspect_ratio = math.exp(random.uniform(*log_ratio))
w = int(round(math.sqrt(target_area * aspect_ratio)))
h = int(round(math.sqrt(target_area / aspect_ratio)))
if w <= img.size[0] and h <= img.size[1]:
i = random.randint(0, img.size[1] - h)
j = random.randint(0, img.size[0] - w)
return i, j, h, w
# Fallback to central crop
in_ratio = img.size[0] / img.size[1]
if (in_ratio < min(ratio)):
w = img.size[0]
h = int(round(w / min(ratio)))
elif (in_ratio > max(ratio)):
h = img.size[1]
w = int(round(h * max(ratio)))
else: # whole image
w = img.size[0]
h = img.size[1]
i = (img.size[1] - h) // 2
j = (img.size[0] - w) // 2
return i, j, h, w
def __call__(self, img):
"""
Args:
img (PIL Image): Image to be cropped and resized.
Returns:
PIL Image: Randomly cropped and resized image.
"""
i, j, h, w = self.params
return F.resized_crop(img, i, j, h, w, self.size, self.interpolation)
def __repr__(self):
interpolate_str = _pil_interpolation_to_str[self.interpolation]
format_string = self.__class__.__name__ + '(size={0}'.format(self.size)
format_string += ', scale={0}'.format(tuple(round(s, 4) for s in self.scale))
format_string += ', ratio={0}'.format(tuple(round(r, 4) for r in self.ratio))
format_string += ', interpolation={0})'.format(interpolate_str)
return format_string
class RandomCrop(object):
"""Crop the given PIL Image at a random location.
Args:
size (sequence or int): Desired output size of the crop. If size is an
int instead of sequence like (h, w), a square crop (size, size) is
made.
padding (int or sequence, optional): Optional padding on each border
of the image. Default is None, i.e no padding. If a sequence of length
4 is provided, it is used to pad left, top, right, bottom borders
respectively. If a sequence of length 2 is provided, it is used to
pad left/right, top/bottom borders, respectively.
pad_if_needed (boolean): It will pad the image if smaller than the
desired size to avoid raising an exception. Since cropping is done
after padding, the padding seems to be done at a random offset.
fill: Pixel fill value for constant fill. Default is 0. If a tuple of
length 3, it is used to fill R, G, B channels respectively.
This value is only used when the padding_mode is constant
padding_mode: Type of padding. Should be: constant, edge, reflect or symmetric. Default is constant.
- constant: pads with a constant value, this value is specified with fill
- edge: pads with the last value on the edge of the image
- reflect: pads with reflection of image (without repeating the last value on the edge)
padding [1, 2, 3, 4] with 2 elements on both sides in reflect mode
will result in [3, 2, 1, 2, 3, 4, 3, 2]
- symmetric: pads with reflection of image (repeating the last value on the edge)
padding [1, 2, 3, 4] with 2 elements on both sides in symmetric mode
will result in [2, 1, 1, 2, 3, 4, 4, 3]
"""
def __init__(self, size, padding=None, pad_if_needed=False, fill=0, padding_mode='constant'):
if isinstance(size, numbers.Number):
self.size = (int(size), int(size))
else:
self.size = size
self.padding = padding
self.pad_if_needed = pad_if_needed
self.fill = fill
self.padding_mode = padding_mode
self.params = self.get_params(SAMPLE_IMG, self.size)
@staticmethod
def get_params(img, output_size):
"""Get parameters for ``crop`` for a random crop.
Args:
img (PIL Image): Image to be cropped.
output_size (tuple): Expected output size of the crop.
Returns:
tuple: params (i, j, h, w) to be passed to ``crop`` for random crop.
"""
w, h = img.size
th, tw = output_size
if w == tw and h == th:
return 0, 0, h, w
i = random.randint(0, h - th)
j = random.randint(0, w - tw)
return i, j, th, tw
def __call__(self, img):
"""
Args:
img (PIL Image): Image to be cropped.
Returns:
PIL Image: Cropped image.
"""
if self.padding is not None:
img = F.pad(img, self.padding, self.fill, self.padding_mode)
# pad the width if needed
if self.pad_if_needed and img.size[0] < self.size[1]:
img = F.pad(img, (self.size[1] - img.size[0], 0), self.fill, self.padding_mode)
# pad the height if needed
if self.pad_if_needed and img.size[1] < self.size[0]:
img = F.pad(img, (0, self.size[0] - img.size[1]), self.fill, self.padding_mode)
i, j, h, w = self.params
return F.crop(img, i, j, h, w)
def __repr__(self):
return self.__class__.__name__ + '(size={0}, padding={1})'.format(self.size, self.padding)
class RandomRotation(object):
"""Rotate the image by angle.
Args:
degrees (sequence or float or int): Range of degrees to select from.
If degrees is a number instead of sequence like (min, max), the range of degrees
will be (-degrees, +degrees).
resample ({PIL.Image.NEAREST, PIL.Image.BILINEAR, PIL.Image.BICUBIC}, optional):
An optional resampling filter. See `filters`_ for more information.
If omitted, or if the image has mode "1" or "P", it is set to PIL.Image.NEAREST.
expand (bool, optional): Optional expansion flag.
If true, expands the output to make it large enough to hold the entire rotated image.
If false or omitted, make the output image the same size as the input image.
Note that the expand flag assumes rotation around the center and no translation.
center (2-tuple, optional): Optional center of rotation.
Origin is the upper left corner.
Default is the center of the image.
.. _filters: https://pillow.readthedocs.io/en/latest/handbook/concepts.html#filters
"""
def __init__(self, degrees, resample=False, expand=False, center=None):
if isinstance(degrees, numbers.Number):
if degrees < 0:
raise ValueError("If degrees is a single number, it must be positive.")
self.degrees = (-degrees, degrees)
else:
if len(degrees) != 2:
raise ValueError("If degrees is a sequence, it must be of len 2.")
self.degrees = degrees
self.resample = resample
self.expand = expand
self.center = center
self.angle = self.get_params(self.degrees)
@staticmethod
def get_params(degrees):
"""Get parameters for ``rotate`` for a random rotation.
Returns:
sequence: params to be passed to ``rotate`` for random rotation.
"""
angle = random.uniform(degrees[0], degrees[1])
return angle
def __call__(self, img):
"""
Args:
img (PIL Image): Image to be rotated.
Returns:
PIL Image: Rotated image.
"""
return F.rotate(img, self.angle, self.resample, self.expand, self.center)
def __repr__(self):
format_string = self.__class__.__name__ + '(degrees={0}'.format(self.degrees)
format_string += ', resample={0}'.format(self.resample)
format_string += ', expand={0}'.format(self.expand)
if self.center is not None:
format_string += ', center={0}'.format(self.center)
format_string += ')'
return format_string
def gen_transform_train_wo_norm(crop_size=512, jitter=(0.6, 1.4), degrees=90.):
randprob_h = random.random()
randprob_v = random.random()
return transforms.Compose([
RandomRotation(degrees),
RandomResizedCrop(crop_size),
ColorJitter(brightness=jitter, contrast=jitter, saturation=jitter, hue=.1),
RandomHorizontalFlip(randprob=randprob_h),
RandomVerticalFlip(randprob=randprob_v),
])
def gen_transform_train(crop_size=448, jitter=(0.6, 1.4)):
randprob_h = random.random()
randprob_v = random.random()
return transforms.Compose([
RandomResizedCrop(crop_size),
ColorJitter(brightness=jitter, contrast=jitter, saturation=jitter, hue=.1),
RandomHorizontalFlip(randprob=randprob_h),
RandomVerticalFlip(randprob=randprob_v),
# PCA Noise should go here,
transforms.ToTensor(),
transforms.Normalize(mean=[0.5], std=[0.5])
])
def gen_transform_validation(crop_size=448):
return transforms.Compose([
transforms.CenterCrop(crop_size),
transforms.ToTensor(),
transforms.Normalize(mean=[0.5], std=[0.5])
])
def gen_transform_test_multi(crop_size=448):
return transforms.Compose([
transforms.RandomCrop(crop_size),
transforms.ToTensor(),
transforms.Normalize(mean=[0.5], std=[0.5])
])
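# Example (a sketch; `frames` is assumed to be a list of PIL Images from one
# sequence). Every random transform above samples its parameters once at
# construction, so the same composed transform augments each frame identically:
#   tfm = gen_transform_train(crop_size=448)
#   tensors = [tfm(img) for img in frames]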
| 37.335417
| 108
| 0.612131
|
ef7a649c9b7477c44a349f837170b1dcb28520d3
| 3,340
|
py
|
Python
|
src/homework/d_repetition/main.py
|
acc-cosc-1336-spring-2022/acc-cosc-1336-spring-2022-ChristianHamilton77
|
ee6156890c5ca581fe1e18cec58f5bbd4010d088
|
[
"MIT"
] | null | null | null |
src/homework/d_repetition/main.py
|
acc-cosc-1336-spring-2022/acc-cosc-1336-spring-2022-ChristianHamilton77
|
ee6156890c5ca581fe1e18cec58f5bbd4010d088
|
[
"MIT"
] | null | null | null |
src/homework/d_repetition/main.py
|
acc-cosc-1336-spring-2022/acc-cosc-1336-spring-2022-ChristianHamilton77
|
ee6156890c5ca581fe1e18cec58f5bbd4010d088
|
[
"MIT"
] | null | null | null |
#
import repetition
#print(repetition.sum_odd_numbers(9))
#print(repetition.get_factorial(4))
#/////// ~f
def dblChk(run,message):
while run == True:
#print('run = ',run,"\n")
display = input(message)
#validate
if display == 'y' or display == 'Y' or display == 'n' or display == 'N':
if display == 'N' or display == 'n':
menu_call(True)
run = False
else:
run = False
break
else:
print('Please enter either Y or N. ')
run = True
#Define Menu
def menu_call(menu_run):
#print('Menu_run = ',menu_run,"\n")
while menu_run == True:
menu = input("Homework 3 Menu \n 1-Factorial \n 2-Sum odd numbers \n 3-Exit \n")
#validate input
# !is Number -> Error message --> Call Menu
if menu.isnumeric():
#is Number
#convert Number to int
menu_choice = int(menu)
if menu_choice >=1 and menu_choice <= 3:#is between 1 & 3 -> NEXT
menu_run = False
#print('Menu_run = ',menu_run,"\n")
else:#!is between 1-3 -> error message -->Call Menu
print('Please Select a Number between 1 and 3.\n')
menu_run = True
else:
print('Please Select a Number.\n')
menu_run = True
#print('menu_choice = ',menu_choice," \n")
#if Selector
selector_filter(menu_choice)
def selector_filter(menu_choice):
# is 3
    #db chk - input('are you sure you would like to exit? ')
if menu_choice == 1 or menu_choice == 2:#is 1 or 2 - > Prompt for number to use in ~f
num = input('Enter a number to process.\n')
        #validate Response
if num.isnumeric():#is Number
num_choice = int(num)#convert Number to int
#print(num_choice)
#if choice is 1
if menu_choice == 1:
#is between 0 & 10 -> NEXT
if num_choice >= 0 and num_choice <= 10:
print(repetition.get_factorial(num_choice))#call ~f get_factorial
dblChk(True, 'Do you want to exit? ')
#!is between 0 & 10 -> error message -->Call Menu
elif num_choice > 10:
print('Please select a Number between 0 and 10.\n')
menu_call(True)
#if choice is 2
elif menu_choice == 2:
#is between 0 & 100 -> NEXT
if num_choice >= 0 and num_choice <= 100:
print(repetition.sum_odd_numbers(num_choice))#call ~f Sum_odd_numbers
dblChk(True, 'Do you want to exit? ')
#!is between 0 & 100 -> error message -->Call Menu
elif num_choice > 100:
print('Please select a Number between 0 and 100.\n')
menu_call(True)
# !is Number -> Error message --> Call Menu
else:
print('Please Select a Number.\n')
menu_call(True)
if menu_choice == 3:
dblChk(True,'Are you sure you want to exit? ')
# else:
# print('idk how this happened.')
#/////// main prog
#Call menu
menu_call(True)
| 31.809524
| 89
| 0.508683
|
16a1e0a7154bd12dc10f2e1e307b16113907a037
| 1,753
|
py
|
Python
|
gryphon/core/operations/bash_utils.py
|
ow-gryphon/gryphon
|
0b34f2f61a50af46b9d1ec1d3c15d53cf4055dd5
|
[
"MIT"
] | null | null | null |
gryphon/core/operations/bash_utils.py
|
ow-gryphon/gryphon
|
0b34f2f61a50af46b9d1ec1d3c15d53cf4055dd5
|
[
"MIT"
] | 1
|
2022-03-08T14:54:26.000Z
|
2022-03-08T15:02:52.000Z
|
gryphon/core/operations/bash_utils.py
|
ow-gryphon/gryphon
|
0b34f2f61a50af46b9d1ec1d3c15d53cf4055dd5
|
[
"MIT"
] | null | null | null |
import errno
import logging
import os
import shutil
import stat
from pathlib import Path
logger = logging.getLogger('gryphon')
def on_error(func, path, exc):
    value = exc[1]  # the exception instance from the (type, value, traceback) triple
if func in (os.unlink, os.remove) and value.errno == errno.EACCES:
os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) # 0777
try:
func(path)
except PermissionError:
logger.error(f"Permission error on {path}. Something might go wrong.")
else:
if func == os.rmdir:
shutil.rmtree(path)
return
raise RuntimeError("File permission error.")
class BashUtils:
@staticmethod
def remove_folder(folder: Path):
"""
Removes a folder (location relative to cwd or absolute).
"""
shutil.rmtree(folder, ignore_errors=False, onerror=on_error)
@staticmethod
def create_folder(folder: Path):
"""
Create a folder in the given path (location relative to cwd or absolute).
"""
folder.mkdir(exist_ok=True)
@staticmethod
def copy_project_template(template_source: Path, template_destiny: Path):
"""Copies the templates to destination folder."""
template_path = template_source / "template"
template_path.mkdir(exist_ok=True)
shutil.copytree(
src=template_path,
dst=rf'{str(template_destiny)}',
dirs_exist_ok=True
)
@staticmethod
def execute_and_log(command) -> tuple:
logger.debug(f"command: {command}")
cmd = os.popen(command)
output = cmd.read()
for line in output.split('\n'):
logger.debug(line)
        # cmd.close() yields the exit status (None on success), paired with the output
        return cmd.close(), output
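# Example (a sketch):
#   status, out = BashUtils.execute_and_log("echo hello")
#   # status is None on success (os.popen close() semantics); out == "hello\n"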
| 26.969231
| 82
| 0.615516
|
4fe5ae92f697b3802e6a0f846877561ac914651d
| 5,173
|
py
|
Python
|
openstackclient/volume/v3/volume_message.py
|
mydevice/python-openstackclient
|
4891bb38208fdcd1a2ae60e47b056841e14fbdf7
|
[
"Apache-2.0"
] | 262
|
2015-01-29T20:10:49.000Z
|
2022-03-23T01:59:23.000Z
|
openstackclient/volume/v3/volume_message.py
|
mydevice/python-openstackclient
|
4891bb38208fdcd1a2ae60e47b056841e14fbdf7
|
[
"Apache-2.0"
] | 5
|
2015-01-21T02:37:35.000Z
|
2021-11-23T02:26:00.000Z
|
openstackclient/volume/v3/volume_message.py
|
mydevice/python-openstackclient
|
4891bb38208fdcd1a2ae60e47b056841e14fbdf7
|
[
"Apache-2.0"
] | 194
|
2015-01-08T07:39:27.000Z
|
2022-03-30T13:51:23.000Z
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Volume V3 Messages implementations"""
import logging
LOG = logging.getLogger(__name__)
from cinderclient import api_versions
from osc_lib.command import command
from osc_lib import exceptions
from osc_lib import utils
from openstackclient.i18n import _
from openstackclient.identity import common as identity_common
class DeleteMessage(command.Command):
_description = _('Delete a volume failure message')
def get_parser(self, prog_name):
parser = super().get_parser(prog_name)
parser.add_argument(
'message_ids',
metavar='<message-id>',
nargs='+',
help=_('Message(s) to delete (ID)')
)
return parser
def take_action(self, parsed_args):
volume_client = self.app.client_manager.volume
if volume_client.api_version < api_versions.APIVersion('3.3'):
msg = _(
"--os-volume-api-version 3.3 or greater is required to "
"support the 'volume message delete' command"
)
raise exceptions.CommandError(msg)
errors = 0
for message_id in parsed_args.message_ids:
try:
volume_client.messages.delete(message_id)
except Exception:
LOG.error(_('Failed to delete message: %s'), message_id)
errors += 1
if errors > 0:
total = len(parsed_args.message_ids)
msg = _('Failed to delete %(errors)s of %(total)s messages.') % {
'errors': errors, 'total': total,
}
raise exceptions.CommandError(msg)
class ListMessages(command.Lister):
_description = _('List volume failure messages')
def get_parser(self, prog_name):
parser = super().get_parser(prog_name)
parser.add_argument(
'--project',
metavar='<project>',
help=_('Filter results by project (name or ID) (admin only)'),
)
identity_common.add_project_domain_option_to_parser(parser)
parser.add_argument(
'--marker',
metavar='<message-id>',
help=_('The last message ID of the previous page'),
default=None,
)
parser.add_argument(
'--limit',
type=int,
metavar='<limit>',
help=_('Maximum number of messages to display'),
default=None,
)
return parser
def take_action(self, parsed_args):
volume_client = self.app.client_manager.volume
identity_client = self.app.client_manager.identity
if volume_client.api_version < api_versions.APIVersion('3.3'):
msg = _(
"--os-volume-api-version 3.3 or greater is required to "
"support the 'volume message list' command"
)
raise exceptions.CommandError(msg)
column_headers = (
'ID',
'Event ID',
'Resource Type',
'Resource UUID',
'Message Level',
'User Message',
'Request ID',
'Created At',
'Guaranteed Until',
)
project_id = None
if parsed_args.project:
project_id = identity_common.find_project(
identity_client,
parsed_args.project,
parsed_args.project_domain).id
search_opts = {
'project_id': project_id,
}
data = volume_client.messages.list(
search_opts=search_opts,
marker=parsed_args.marker,
limit=parsed_args.limit)
return (
column_headers,
(utils.get_item_properties(s, column_headers) for s in data)
)
class ShowMessage(command.ShowOne):
_description = _('Show a volume failure message')
def get_parser(self, prog_name):
parser = super(ShowMessage, self).get_parser(prog_name)
parser.add_argument(
'message_id',
metavar='<message-id>',
help=_('Message to show (ID).')
)
return parser
def take_action(self, parsed_args):
volume_client = self.app.client_manager.volume
if volume_client.api_version < api_versions.APIVersion('3.3'):
msg = _(
"--os-volume-api-version 3.3 or greater is required to "
"support the 'volume message show' command"
)
raise exceptions.CommandError(msg)
message = volume_client.messages.get(parsed_args.message_id)
return zip(*sorted(message._info.items()))
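# CLI usage (requires --os-volume-api-version 3.3 or greater, per the checks above):
#   openstack volume message list
#   openstack volume message show <message-id>
#   openstack volume message delete <message-id> [...]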
| 31.162651
| 77
| 0.593273
|
45b78f024c449f52fdb3ae8ed4377e61bdb40080
| 5,331
|
py
|
Python
|
airflow/providers/amazon/aws/transfers/mysql_to_s3.py
|
emilioego/airflow
|
3457c7847cd24413ff5b622e65c27d8370f94502
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 79
|
2021-10-15T07:32:27.000Z
|
2022-03-28T04:10:19.000Z
|
airflow/providers/amazon/aws/transfers/mysql_to_s3.py
|
emilioego/airflow
|
3457c7847cd24413ff5b622e65c27d8370f94502
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 153
|
2021-10-15T05:23:46.000Z
|
2022-02-23T06:07:10.000Z
|
airflow/providers/amazon/aws/transfers/mysql_to_s3.py
|
emilioego/airflow
|
3457c7847cd24413ff5b622e65c27d8370f94502
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 23
|
2021-10-15T02:36:37.000Z
|
2022-03-17T02:59:27.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
from tempfile import NamedTemporaryFile
from typing import Optional, Union
import numpy as np
import pandas as pd
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.amazon.aws.hooks.s3 import S3Hook
from airflow.providers.mysql.hooks.mysql import MySqlHook
from airflow.utils.decorators import apply_defaults
class MySQLToS3Operator(BaseOperator):
"""
    Saves data from a specific MySQL query into a file in S3.
:param query: the sql query to be executed. If you want to execute a file, place the absolute path of it,
ending with .sql extension. (templated)
:type query: str
:param s3_bucket: bucket where the data will be stored. (templated)
:type s3_bucket: str
:param s3_key: desired key for the file. It includes the name of the file. (templated)
:type s3_key: str
:param mysql_conn_id: reference to a specific mysql database
:type mysql_conn_id: str
:param aws_conn_id: reference to a specific S3 connection
:type aws_conn_id: str
:param verify: Whether or not to verify SSL certificates for S3 connection.
By default SSL certificates are verified.
You can provide the following values:
- ``False``: do not validate SSL certificates. SSL will still be used
(unless use_ssl is False), but SSL certificates will not be verified.
- ``path/to/cert/bundle.pem``: A filename of the CA cert bundle to uses.
You can specify this argument if you want to use a different
CA cert bundle than the one used by botocore.
:type verify: bool or str
:param pd_csv_kwargs: arguments to include in pd.to_csv (header, index, columns...)
:type pd_csv_kwargs: dict
    :param index: whether to include the DataFrame's index in the output file
    :type index: bool
    :param header: whether to include the header row in the S3 file
    :type header: bool
"""
template_fields = (
's3_bucket',
's3_key',
'query',
)
template_ext = ('.sql',)
@apply_defaults
def __init__(
self,
*,
query: str,
s3_bucket: str,
s3_key: str,
mysql_conn_id: str = 'mysql_default',
aws_conn_id: str = 'aws_default',
verify: Optional[Union[bool, str]] = None,
pd_csv_kwargs: Optional[dict] = None,
index: bool = False,
header: bool = False,
**kwargs,
) -> None:
super().__init__(**kwargs)
self.query = query
self.s3_bucket = s3_bucket
self.s3_key = s3_key
self.mysql_conn_id = mysql_conn_id
self.aws_conn_id = aws_conn_id
self.verify = verify
self.pd_csv_kwargs = pd_csv_kwargs or {}
if "path_or_buf" in self.pd_csv_kwargs:
raise AirflowException('The argument path_or_buf is not allowed, please remove it')
if "index" not in self.pd_csv_kwargs:
self.pd_csv_kwargs["index"] = index
if "header" not in self.pd_csv_kwargs:
self.pd_csv_kwargs["header"] = header
def _fix_int_dtypes(self, df: pd.DataFrame) -> None:
"""Mutate DataFrame to set dtypes for int columns containing NaN values."""
for col in df:
if "float" in df[col].dtype.name and df[col].hasnans:
# inspect values to determine if dtype of non-null values is int or float
notna_series = df[col].dropna().values
if np.isclose(notna_series, notna_series.astype(int)).all():
# set to dtype that retains integers and supports NaNs
df[col] = np.where(df[col].isnull(), None, df[col])
df[col] = df[col].astype(pd.Int64Dtype())
def execute(self, context) -> None:
mysql_hook = MySqlHook(mysql_conn_id=self.mysql_conn_id)
s3_conn = S3Hook(aws_conn_id=self.aws_conn_id, verify=self.verify)
data_df = mysql_hook.get_pandas_df(self.query)
self.log.info("Data from MySQL obtained")
self._fix_int_dtypes(data_df)
with NamedTemporaryFile(mode='r+', suffix='.csv') as tmp_csv:
data_df.to_csv(tmp_csv.name, **self.pd_csv_kwargs)
s3_conn.load_file(filename=tmp_csv.name, key=self.s3_key, bucket_name=self.s3_bucket)
if s3_conn.check_for_key(self.s3_key, bucket_name=self.s3_bucket):
file_location = os.path.join(self.s3_bucket, self.s3_key)
self.log.info("File saved correctly in %s", file_location)
| 41.325581
| 109
| 0.672669
|
52eeb394830606375c0ab62c69ace2c3897ed19d
| 389
|
py
|
Python
|
misago/misago/conf/staticsettings.py
|
vascoalramos/misago-deployment
|
20226072138403108046c0afad9d99eb4163cedc
|
[
"MIT"
] | 2
|
2021-03-06T21:06:13.000Z
|
2021-03-09T15:05:12.000Z
|
misago/misago/conf/staticsettings.py
|
vascoalramos/misago-deployment
|
20226072138403108046c0afad9d99eb4163cedc
|
[
"MIT"
] | null | null | null |
misago/misago/conf/staticsettings.py
|
vascoalramos/misago-deployment
|
20226072138403108046c0afad9d99eb4163cedc
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from . import defaults
class StaticSettings:
def __getattr__(self, name):
try:
return getattr(settings, name)
except AttributeError:
pass
try:
return getattr(defaults, name)
except AttributeError:
pass
raise AttributeError("%s setting is not defined" % name)
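# A small illustrative sketch (the setting name is hypothetical): lookups fall
# through django.conf.settings to the bundled defaults and only raise when
# neither defines the attribute.
if __name__ == '__main__':  # illustration only
    static_settings = StaticSettings()
    try:
        static_settings.NOT_A_REAL_SETTING
    except AttributeError as exc:
        print(exc)  # -> "NOT_A_REAL_SETTING setting is not defined"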
| 20.473684
| 64
| 0.601542
|
28dad0395aa2df7ae897d8f355cedccbf777859d
| 324
|
py
|
Python
|
forkplot3d/parameters.py
|
raalesir/forkplot3d
|
068fb2472870cfc57201eb7c738afa124931f56b
|
[
"MIT"
] | null | null | null |
forkplot3d/parameters.py
|
raalesir/forkplot3d
|
068fb2472870cfc57201eb7c738afa124931f56b
|
[
"MIT"
] | null | null | null |
forkplot3d/parameters.py
|
raalesir/forkplot3d
|
068fb2472870cfc57201eb7c738afa124931f56b
|
[
"MIT"
] | null | null | null |
"""
parameters for system and simulation
"""
import numpy as np
l_0 = 0.7 # the "equilibrium" length of the bond between nodes of the same size bin
number_of_size_bins = 10 # the number of size bins
number_of_histogram_bins = np.linspace(0, 5, 30)  # histogram bin edges for each node (was a plain count of 20)
PLOT_ROOT = 'plots'
| 19.058824
| 95
| 0.734568
|
d1a15b9094bc7fa13330eef75f713c369496ef05
| 2,829
|
py
|
Python
|
twitchbot/database/models.py
|
jostster/PythonTwitchBotFramework
|
931fdac9226b0086b37a011fd7c0265580c87ef0
|
[
"MIT"
] | null | null | null |
twitchbot/database/models.py
|
jostster/PythonTwitchBotFramework
|
931fdac9226b0086b37a011fd7c0265580c87ef0
|
[
"MIT"
] | null | null | null |
twitchbot/database/models.py
|
jostster/PythonTwitchBotFramework
|
931fdac9226b0086b37a011fd7c0265580c87ef0
|
[
"MIT"
] | null | null | null |
from asyncio import Task
from sqlalchemy import Column, Integer, String, Float, Boolean
from ..config import cfg
from ..enums import CommandContext
from .session import Base, database_init
__all__ = ('Quote', 'CustomCommand')
class Quote(Base):
__tablename__ = 'quotes'
id = Column(Integer, primary_key=True, nullable=False)
user = Column(String)
channel = Column(String, nullable=False)
alias = Column(String)
value = Column(String, nullable=False)
@classmethod
def create(cls, channel: str, value: str, user: str = None, alias: str = None):
return Quote(channel=channel.lower(), user=user, value=value, alias=alias)
class CustomCommand(Base):
__tablename__ = 'commands'
id = Column(Integer, primary_key=True, nullable=False)
name = Column(String, nullable=False)
channel = Column(String, nullable=False)
response = Column(String, nullable=False)
context = CommandContext.CHANNEL
permission = None
@classmethod
def create(cls, channel: str, name: str, response: str):
return CustomCommand(channel=channel.lower(), name=name.lower(), response=response)
@property
def fullname(self):
return self.name
def __str__(self):
return f'<CustomCommand channel={self.channel!r} name={self.name!r} response={self.response!r}>'
class Balance(Base):
__tablename__ = 'balance'
id = Column(Integer, nullable=False, primary_key=True)
channel = Column(String, nullable=False)
user = Column(String, nullable=False)
balance = Column(Integer, nullable=False)
@classmethod
def create(cls, channel: str, user: str, balance: int = cfg.default_balance):
return Balance(channel=channel.lower(), user=user, balance=balance)
class CurrencyName(Base):
__tablename__ = 'currency_names'
id = Column(Integer, nullable=False, primary_key=True)
channel = Column(String, nullable=False)
name = Column(String, nullable=False)
@classmethod
def create(cls, channel: str, name: str):
return CurrencyName(channel=channel.lower(), name=name)
class MessageTimer(Base):
__tablename__ = 'message_timers'
id = Column(Integer, nullable=False, primary_key=True)
name = Column(String, nullable=False)
channel = Column(String, nullable=False)
message = Column(String, nullable=False)
interval = Column(Float, nullable=False)
active = Column(Boolean, nullable=False, default=False)
task: Task = None
@property
def running(self):
return self.task is not None and not self.task.done()
@classmethod
def create(cls, channel: str, name: str, message: str, interval: float, active=False):
return MessageTimer(name=name, channel=channel, message=message, interval=interval, active=active)
database_init()
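# A brief sketch of the factory helpers above (values are invented; persisting
# these objects goes through the session machinery in .session, which is not
# shown here). Note that create() lower-cases channel and command names.
if __name__ == '__main__':  # illustration only
    quote = Quote.create(channel='MyChannel', value='hello world', user='someone')
    cmd = CustomCommand.create(channel='MyChannel', name='Hi', response='hello!')
    print(quote.channel, cmd.fullname)  # -> mychannel hi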
| 30.095745
| 106
| 0.699187
|
ed16ec3f21338900892241d0f06e54dd4f1fb83e
| 2,032
|
py
|
Python
|
main.py
|
EmanuelAngelo/emailporvoz
|
925b37c57106964dbd38bc7acf700d968ee75846
|
[
"MIT"
] | null | null | null |
main.py
|
EmanuelAngelo/emailporvoz
|
925b37c57106964dbd38bc7acf700d968ee75846
|
[
"MIT"
] | null | null | null |
main.py
|
EmanuelAngelo/emailporvoz
|
925b37c57106964dbd38bc7acf700d968ee75846
|
[
"MIT"
] | null | null | null |
import pyttsx3
import speech_recognition as speech
import smtplib
from email.message import EmailMessage
motor = pyttsx3.init()
def jarvisPobreFalar(mensagem):  # convert text to speech
motor.say(mensagem)
motor.runAndWait()
# jarvisPobreFalar("Por quê Emanuel é lindo?")
def jarvisPobreOuvir():
reconhecer = speech.Recognizer()
try:
with speech.Microphone() as microfone:
print("Estou ouvindo você.")
audio = reconhecer.listen(microfone)
texto = reconhecer.recognize_google(audio, language="pt-BR")
print(texto)
return texto.lower()
    except Exception:
        # recognition failed (no microphone, no network, or unintelligible
        # audio); fall through and return None to the caller
        return None
#jarvisPobreOuvir()
def jarvisPobreEnviarEmail(destino, assunto, mensagem):
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login('seuemail@gmail.com', 'seusenha')
email = EmailMessage()
email['From'] = 'seuemail@gmail.com'
email['To'] = destino
email['Subject'] = assunto
email.set_content(mensagem)
server.send_message(email)
print("E-mail enviado com sucesso!")
def main():
jarvisPobreFalar("Olá, vamos começar.")
jarvisPobreFalar("Olá, irei imprimir sua lista de e-mail.")
lista_email = {
'mãe': 'mdas71@icloud.com',
'namorada': 'silvaniacoutinho19@gmail.com',
'paulo': 'pumbadeveloper@gmail.com'
}
print(lista_email)
jarvisPobreFalar("Para quem voce deseja enviar este e-mail?")
contato = jarvisPobreOuvir()
try:
if lista_email[contato]:
jarvisPobreFalar(f"Qual assunto, voce deseja enviar para {lista_email[contato]} ?")
assunto = jarvisPobreOuvir()
jarvisPobreFalar("qual mensagem ?")
mensagem = jarvisPobreOuvir()
if contato and assunto and mensagem:
jarvisPobreEnviarEmail(lista_email[contato], assunto, mensagem)
    except Exception:
        # unknown contact, failed recognition, or an SMTP error
        jarvisPobreFalar("Desculpe, ocorreu um erro. Tente novamente")
main()
| 30.787879
| 96
| 0.639272
|
05858f9f3937539515db07acf38e749723f48719
| 1,036
|
py
|
Python
|
secret auction.py
|
claudialsm/mini-projects
|
d0d3033074dee1ee94abb01fae2365371e5a5285
|
[
"MIT"
] | null | null | null |
secret auction.py
|
claudialsm/mini-projects
|
d0d3033074dee1ee94abb01fae2365371e5a5285
|
[
"MIT"
] | null | null | null |
secret auction.py
|
claudialsm/mini-projects
|
d0d3033074dee1ee94abb01fae2365371e5a5285
|
[
"MIT"
] | null | null | null |
"""Secret Auction Program
The program secretly asks each participant to state their max bid, and chooses the max winner once
all bids have been received. """
from os import system
def ask_bid():
# Secret individual bid session
name = input("What is your name?: ")
amount = int(input("What is your highest bid($)?: "))
return name, amount
def compile_bid():
bids = {}
bids_incomplete = True
while bids_incomplete:
bidder_name, bidder_amount = ask_bid()
bids[bidder_name] = bidder_amount
other_users = input("Are there other users who want to bid? Type 'yes' for yes and 'no' for no: ").lower()
system('clear')
if other_users == 'yes':
continue
elif other_users == 'no':
highest_bid = max(bids.values())
highest_bidder = [k for k, v in bids.items() if v == highest_bid]
print(f"{highest_bidder[0]} won the bid at ${highest_bid}!")
break
if __name__ == '__main__':
compile_bid()
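# A hedged aside, not part of the original script: the winner-selection pattern
# in isolation, wrapped in a helper so it does not run alongside compile_bid().
# The sample bids are invented.
def winner_pattern_demo():
    sample_bids = {'alice': 120, 'bob': 95, 'carol': 120}
    top = max(sample_bids.values())
    winners = [name for name, amount in sample_bids.items() if amount == top]
    print(f"{winners[0]} won the bid at ${top}!")  # -> alice won the bid at $120!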
| 31.393939
| 114
| 0.616795
|
f6171ffb69722daef8bbc02dae4f5f0dfd762498
| 821
|
py
|
Python
|
nectr/chat/migrations/0003_auto_20170511_0436.py
|
FarmingdaleTUTR/nectr
|
39b6e2b65bc9d9b1877f1b7c31258b2558fff371
|
[
"MIT"
] | 1
|
2017-05-07T11:40:22.000Z
|
2017-05-07T11:40:22.000Z
|
nectr/chat/migrations/0003_auto_20170511_0436.py
|
FarmingdaleTUTR/nectr
|
39b6e2b65bc9d9b1877f1b7c31258b2558fff371
|
[
"MIT"
] | 83
|
2017-03-17T15:00:02.000Z
|
2017-05-08T02:59:32.000Z
|
nectr/chat/migrations/0003_auto_20170511_0436.py
|
FarmingdaleTUTR/nectr
|
39b6e2b65bc9d9b1877f1b7c31258b2558fff371
|
[
"MIT"
] | 2
|
2017-04-04T22:54:16.000Z
|
2017-05-07T05:51:38.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-11 04:36
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('chat', '0002_auto_20170510_1535'),
]
operations = [
migrations.AlterField(
model_name='conversation',
name='initiator_last_read_time',
field=models.DateTimeField(default=datetime.datetime(2017, 5, 11, 4, 36, 7, 169662, tzinfo=utc)),
),
migrations.AlterField(
model_name='conversation',
name='recipient_last_read_time',
field=models.DateTimeField(default=datetime.datetime(2017, 5, 11, 4, 36, 7, 169700, tzinfo=utc)),
),
]
| 29.321429
| 109
| 0.644336
|
eb705d20625f3e62bfb25e31c4f1c293e328bc94
| 7,265
|
py
|
Python
|
jsjy/models.py
|
Qin6468/Schoolmanagement
|
8991bd12069d249156ced89a6311589667061ac5
|
[
"MulanPSL-1.0"
] | null | null | null |
jsjy/models.py
|
Qin6468/Schoolmanagement
|
8991bd12069d249156ced89a6311589667061ac5
|
[
"MulanPSL-1.0"
] | 1
|
2021-01-05T06:46:53.000Z
|
2021-01-05T06:46:53.000Z
|
jsjy/models.py
|
Qin6468/Schoolmanagement
|
8991bd12069d249156ced89a6311589667061ac5
|
[
"MulanPSL-1.0"
] | 3
|
2021-01-04T12:20:23.000Z
|
2021-01-04T14:01:29.000Z
|
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
class Admin(db.Model):
"""用户表
"""
id = db.Column(db.Integer, primary_key=True)
admin = db.Column(db.String(20), unique=True, nullable=False)
password = db.Column(db.String(32), nullable=False)
salt = db.Column(db.String(32), nullable=False)
level = db.Column(db.Integer)
name = db.Column(db.String(60), nullable=False)
status = db.Column(db.Integer)
def __init__(self, admin, password,salt,level,name,status):
self.admin=admin
self.password=password
self.salt=salt
self.level=level
self.name=name
self.status=status
def __repr__(self):
return '<id %r>' % self.id
class Teacher(db.Model):
"""
教师
"""
id = db.Column(db.Integer, primary_key=True)
a_id = db.Column(db.Integer)
name = db.Column(db.String(60), nullable=False)
cid = db.Column(db.String(18), nullable=False)
in_time = db.Column(db.Integer)
out_time = db.Column(db.Integer)
oa_time = db.Column(db.Integer)
info = db.Column(db.Text, nullable=False)
def __init__(self, a_id,name,cid,in_time,out_time,oa_time,info):
self.a_id=a_id
self.name=name
self.cid=cid
self.in_time=in_time
self.out_time=out_time
self.oa_time=oa_time
self.info=info
def __repr__(self):
return '<Teacher id %r>' % self.id
class Class(db.Model):
"""
教师
"""
id = db.Column(db.Integer, primary_key=True)
t_id = db.Column(db.Integer)
# t_id = db.Column(db.Integer, db.ForeignKey('Teacher.a_id'))
name = db.Column(db.String(60), nullable=False)
addtime = db.Column(db.Integer)
user_count = db.Column(db.Integer)
def __init__(self, t_id,name,addtime,user_count):
self.t_id=t_id
self.name=name
self.addtime=addtime
self.user_count=user_count
def __repr__(self):
return '<Class id %r>' % self.id
class Student(db.Model):
"""
学生
"""
id = db.Column(db.Integer, primary_key=True)
class_id = db.Column(db.Integer)
# class_id = db.Column(db.Integer, db.ForeignKey('Class.id'))
name = db.Column(db.String(60), nullable=False)
code = db.Column(db.String(20), nullable=False)
cid = db.Column(db.String(18), nullable=False)
in_time = db.Column(db.Integer)
out_time = db.Column(db.Integer)
info = db.Column(db.Text, nullable=False)
l_name = db.Column(db.String(60), nullable=False)
l_phone = db.Column(db.String(11), nullable=False)
l2_name = db.Column(db.String(60), nullable=False)
l2_phone = db.Column(db.String(11), nullable=False)
add = db.Column(db.String(255), nullable=False)
def __init__(self, class_id,name,code,cid,in_time,out_time,info,l_name,l_phone,l2_name,l2_phone,add):
self.class_id=class_id
self.name=name
self.code=code
self.cid=cid
self.in_time=in_time
self.out_time=out_time
self.info=info
self.l_name=l_name
self.l_phone=l_phone
self.l2_name=l2_name
self.l2_phone=l2_phone
self.add=add
def __repr__(self):
return '<Student id %r>' % self.id
class Score(db.Model):
"""
学生
"""
id = db.Column(db.Integer, primary_key=True)
uid = db.Column(db.Integer)
# uid = db.Column(db.Integer, db.ForeignKey('Student.id'))
score = db.Column(db.Float(3,1), nullable=False)
k_time = db.Column(db.Integer)
kskc = db.Column(db.String(60), nullable=False)
def __init__(self, uid,score,k_time,kskc):
self.uid=uid
self.score=score
self.kskc=kskc
self.k_time=k_time
def __repr__(self):
        return '<Score id %r>' % self.id
class Course(db.Model):
"""
课程
"""
id = db.Column(db.Integer, primary_key=True)
c_id = db.Column(db.Integer)
name = db.Column(db.String(60), nullable=False)
college = db.Column(db.String(20), nullable=False)
credit = db.Column(db.Integer)
semester_hour = db.Column(db.Integer)
number=db.Column(db.Integer)
time = db.Column(db.String(60), nullable=False)
local=db.Column(db.String(60), nullable=False)
info = db.Column(db.Text, nullable=False)
def __init__(self,c_id,name,college,credit,semester_hour,number,time,local,info):
self.c_id=c_id
self.name=name
self.college=college
self.credit=credit
self.semester_hour=semester_hour
self.number=number
self.time=time
self.local=local
self.info=info
def __repr__(self):
return '<Course c_id %r>' % self.c_id
class cc(db.Model):
"""
班级课程表
"""
id = db.Column(db.Integer, primary_key=True)
c_id = db.Column(db.Integer, db.ForeignKey('Course.c_id'))
class_id = db.Column(db.Integer, db.ForeignKey('Class.id'))
def __init__(self, c_id, class_id):
self.c_id = c_id
self.class_id = class_id
def __repr__(self):
return '<Course id %r>' % self.id
class sc(db.Model):
"""
学生成绩表
"""
id = db.Column(db.Integer, primary_key=True)
uid = db.Column(db.Integer, db.ForeignKey('Student.id'))
c_id = db.Column(db.Integer, db.ForeignKey('Course.c_id'))
gid = db.Column(db.Integer, db.ForeignKey('Score.id'))
def __init__(self, uid, c_id, gid):
self.uid = uid
self.c_id = c_id
self.gid = gid
def __repr__(self):
        return '<sc id %r>' % self.id
class tc(db.Model):
"""
老师课程表
"""
id = db.Column(db.Integer, primary_key=True)
tid = db.Column(db.Integer, db.ForeignKey('Teacher.a_id'))
c_id = db.Column(db.Integer, db.ForeignKey('Course.c_id'))
def __init__(self, tid, c_id):
self.tid = tid
self.c_id = c_id
def __repr__(self):
        return '<tc id %r>' % self.id
class Evaluation(db.Model):
"""
教师评价
"""
id = db.Column(db.Integer, primary_key=True)
sid = db.Column(db.String(20))
tid = db.Column(db.String(20))
c_id = db.Column(db.Integer)
score1 = db.Column(db.Integer)
score2 = db.Column(db.Integer)
score3 = db.Column(db.Integer)
score4 = db.Column(db.Integer)
score5 = db.Column(db.Integer)
commit = db.Column(db.String(60))
def __init__(self, sid, tid, c_id, score1, score2, score3, score4, score5, commit):
self.sid = sid
self.tid = tid
self.c_id = c_id
self.score1 = score1
self.score2 = score2
self.score3 = score3
self.score4 = score4
self.score5 = score5
self.commit = commit
def __repr__(self):
return '<Evaluation id %r>' % self.id
class Thesis(db.Model):
id = db.Column(db.Integer, primary_key=True)
    t_id = db.Column(db.String(20))  # thesis id
    sid = db.Column(db.String(20))  # student
    tid = db.Column(db.String(20))  # teacher
topic = db.Column(db.String(20), nullable=False)
info = db.Column(db.Text, nullable=False)
status = db.Column(db.Integer)
def __init__(self, t_id, sid, tid, topic, info, status):
self.t_id = t_id
self.sid = sid
self.tid = tid
self.topic = topic
self.info = info
self.status = status
def __repr__(self):
        return '<Thesis id %r>' % self.id
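# A minimal wiring sketch (hypothetical, not part of this module): binding the
# models above to a Flask app and creating the tables; the database URI is a
# placeholder.
if __name__ == '__main__':  # illustration only
    from flask import Flask

    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///school.db'
    db.init_app(app)
    with app.app_context():
        db.create_all()  # create every table declared above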
| 28.944223
| 105
| 0.625877
|
76c938a8ba8e948cb9c1f600ec4af0d5877eabec
| 1,888
|
py
|
Python
|
edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HPCQPF.py
|
srcarter3/awips2
|
37f31f5e88516b9fd576eaa49d43bfb762e1d174
|
[
"Apache-2.0"
] | null | null | null |
edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HPCQPF.py
|
srcarter3/awips2
|
37f31f5e88516b9fd576eaa49d43bfb762e1d174
|
[
"Apache-2.0"
] | null | null | null |
edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HPCQPF.py
|
srcarter3/awips2
|
37f31f5e88516b9fd576eaa49d43bfb762e1d174
|
[
"Apache-2.0"
] | 1
|
2021-10-30T00:03:05.000Z
|
2021-10-30T00:03:05.000Z
|
##
# This software was developed and / or modified by Raytheon Company,
# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
#
# U.S. EXPORT CONTROLLED TECHNICAL DATA
# This software product contains export-restricted data whose
# export/transfer/disclosure is restricted by U.S. law. Dissemination
# to non-U.S. persons whether in the United States or abroad requires
# an export license or other authorization.
#
# Contractor Name: Raytheon Company
# Contractor Address: 6825 Pine Street, Suite 340
# Mail Stop B8
# Omaha, NE 68106
# 402.291.0100
#
# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
# further licensing information.
##
##
# This is a base file that is not intended to be overridden.
#
# This file can be subclassed to override behavior. Please see the
# Configuration Guides->Smart Initialization Configuration section of the GFE
# Online Help for guidance on creating a new smart init
##
from Init import *
##--------------------------------------------------------------------------
## Module that calculates surface weather elements from HPC QPF model output.
##
##--------------------------------------------------------------------------
class HPCQPFForecaster(Forecaster):
def __init__(self):
Forecaster.__init__(self, "HPCQPF", "HPCQPF")
##--------------------------------------------------------------------------
## Calculates HPC QPF from the QPF model
##--------------------------------------------------------------------------
def calcQPF(self, tpHPC_SFC):
# Assign the value filtering out everything above 1000 mm
grid = where(greater(tpHPC_SFC, 1000), float32(0.0), tpHPC_SFC / 25.4)
return clip(grid, 0, 5) # clip at zero and 5 inches
def main():
HPCQPFForecaster().run()
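# A standalone numpy illustration of the calcQPF arithmetic above (the GFE
# framework normally supplies where/greater/clip via "from Init import *");
# the sample values are invented.
if __name__ == '__main__':  # illustration only
    import numpy as np

    tp_mm = np.array([0.0, 12.7, 254.0, 1e20], dtype=np.float32)    # precip in mm
    inches = np.where(tp_mm > 1000, np.float32(0.0), tp_mm / 25.4)  # mask fill values, mm -> in
    print(np.clip(inches, 0, 5))  # [0.  0.5 5.  0. ]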
| 37.76
| 78
| 0.577331
|
f1132334b867a0a393c0473b7b9b0241f1846dc9
| 1,740
|
py
|
Python
|
recipes/multisig/1_create_address.py
|
GRS-Community/pycoin
|
4a9b9722c91e2831519ddf9675fe8c70246432b7
|
[
"MIT"
] | 5
|
2017-12-15T13:40:50.000Z
|
2021-12-18T13:18:54.000Z
|
recipes/multisig/1_create_address.py
|
GRS-Community/pycoin
|
4a9b9722c91e2831519ddf9675fe8c70246432b7
|
[
"MIT"
] | 1
|
2018-08-06T03:48:14.000Z
|
2018-09-03T03:01:03.000Z
|
recipes/multisig/1_create_address.py
|
GRS-Community/pycoin
|
4a9b9722c91e2831519ddf9675fe8c70246432b7
|
[
"MIT"
] | 6
|
2018-08-24T18:49:47.000Z
|
2021-01-19T10:04:08.000Z
|
#!/usr/bin/env python
# This script shows you how to create a "2-of-3" multisig address.
# It requires a BIP32 private key file.
import os
import sys
from pycoin.key.BIP32Node import BIP32Node
from pycoin.serialize import b2h
from pycoin.ui import address_for_pay_to_script
from pycoin.tx.pay_to.ScriptMultisig import ScriptMultisig
def main():
if len(sys.argv) != 2:
print("usage: %s bip32_key_file" % sys.argv[0])
sys.exit(-1)
with open(sys.argv[1], "r") as f:
hwif = f.readline().strip()
# turn the bip32 text into a BIP32Node object
BIP32_KEY = BIP32Node.from_hwif(hwif)
# create three sec_keys (these are public keys, streamed using the SEC format)
SEC_0 = BIP32_KEY.subkey_for_path("0/0/0").sec()
SEC_1 = BIP32_KEY.subkey_for_path("0/1/0").sec()
SEC_2 = BIP32_KEY.subkey_for_path("0/2/0").sec()
public_key_sec_list = [SEC_0, SEC_1, SEC_2]
# create the 2-of-3 multisig script
# any 2 signatures can release the funds
pay_to_multisig_script = ScriptMultisig(2, public_key_sec_list).script()
# create a "2-of-3" multisig address_for_multisig
the_address = address_for_pay_to_script(pay_to_multisig_script)
print("Here is your pay 2-of-3 address: %s" % the_address)
print("Here is the pay 2-of-3 script: %s" % b2h(pay_to_multisig_script))
print("The hex script should go into p2sh_lookup.hex")
base_dir = os.path.dirname(sys.argv[1])
print("The three WIFs are written into %s as wif0, wif1 and wif2" % base_dir)
for i in range(3):
wif = BIP32_KEY.subkey_for_path("0/%d/0" % i).wif()
with open(os.path.join(base_dir, "wif%d" % i), "w") as f:
f.write(wif)
if __name__ == '__main__':
main()
| 31.636364
| 82
| 0.683908
|
01ac40107585cdef221ae7135388afa87cf265f6
| 8,714
|
py
|
Python
|
cogs/commands/misc/giveaway.py
|
Ultra03/BottyMcBotface
|
40226c1bdf2d05ff7835101f7a8a389828ec70b0
|
[
"MIT"
] | null | null | null |
cogs/commands/misc/giveaway.py
|
Ultra03/BottyMcBotface
|
40226c1bdf2d05ff7835101f7a8a389828ec70b0
|
[
"MIT"
] | null | null | null |
cogs/commands/misc/giveaway.py
|
Ultra03/BottyMcBotface
|
40226c1bdf2d05ff7835101f7a8a389828ec70b0
|
[
"MIT"
] | null | null | null |
import discord
import asyncio
import datetime
import pytimeparse
import random
import traceback
from cogs.utils.tasks import end_giveaway
from data.giveaway import Giveaway as GiveawayDB
from discord.ext import commands
class Giveaway(commands.Cog):
def __init__(self, bot):
self.bot = bot
async def prompt(self, ctx, data, _type):
question = data['prompt']
convertor = data['convertor']
def wait_check(m):
return m.author == ctx.author and m.channel == ctx.channel
ret = None
prompt = await ctx.send(question)
try:
response = await self.bot.wait_for('message', check=wait_check, timeout=120)
except asyncio.TimeoutError:
return
else:
await response.delete()
await prompt.delete()
if response.content.lower() == "cancel":
return
elif response.content is not None and response.content != "":
if _type in ['name', 'winners', 'time']:
ret = convertor(response.content)
if _type == 'winners' and ret < 1:
raise commands.BadArgument("Can't have less than 1 winner")
if ret is None:
raise commands.BadArgument(f"Improper value given for {_type}")
else:
ret = await convertor(ctx, response.content)
return ret
@commands.guild_only()
@commands.max_concurrency(1, per=commands.BucketType.member, wait=False)
@commands.group()
async def giveaway(self, ctx):
"""
Manage giveaways using !giveaway <action>, choosing from below...
"""
if not self.bot.settings.permissions.hasAtLeast(ctx.guild, ctx.author, 6):
raise commands.BadArgument(
"You need to be an administrator or higher to use that command.")
if ctx.invoked_subcommand is None:
raise commands.BadArgument("Invalid giveaway subcommand passed. Options: `start`, `reroll`, `end`")
@giveaway.command()
async def start(self, ctx, sponsor: discord.Member = None, time: str = None, winners: int = -1, channel: discord.TextChannel = None):
"""Start a giveaway. Use `!giveaway start` and follow the prompts, or see the example.
Example Use:
------------
!giveaway start (You will be prompted for all info)
!giveaway start @habibi test#1531 30s 1 #bot-commands
Parameters
----------
sponsor : discord.Member
Who sponsored the giveaway
time : str, optional
When to end, by default None
winners : int
How many winners
channel : discord.TextChannel, optional
            Channel to post the giveaway in
Raises
------
commands.BadArgument
[description]
"""
prompts = {
'name': {
'convertor': str,
'prompt': "Enter a name for the giveaway (or type cancel to cancel)"
},
'sponsor': {
'convertor': commands.MemberConverter().convert,
'prompt': "Enter the sponsor's user ID (or type cancel to cancel)"
},
'time': {
'convertor': pytimeparse.parse,
'prompt': "Enter the time until the giveaway ends (or type cancel to cancel)"
},
'winners': {
'convertor': int,
'prompt': "Enter the amount of winners for the giveaway (or type cancel to cancel)"
},
'channel': {
'convertor': commands.TextChannelConverter().convert,
'prompt': "Mention the channel to post the giveaway in (or type cancel to cancel)"
}
}
responses = {
'name': None,
'sponsor': sponsor,
'time': pytimeparse.parse(time) if time is not None else None,
'winners': None if winners < 1 else winners,
'channel': channel
}
for response in responses:
if responses[response] is None:
res = await self.prompt(ctx=ctx, data=prompts[response], _type=response)
if res is None:
raise commands.BadArgument("Command cancelled.")
responses[response] = res
now = datetime.datetime.now()
delta = responses['time']
end_time = now + datetime.timedelta(seconds=delta)
embed = discord.Embed(title=responses['name'])
embed.description = f"Hosted by {responses['sponsor'].mention}\n{responses['winners']} {'winner' if responses['winners'] == 1 else 'winners'}"
embed.timestamp = end_time
embed.set_footer(text="Ends")
        # use the prompted/converted channel; the raw ``channel`` argument may be None
        message = await responses['channel'].send(embed=embed)
await message.add_reaction("✅")
await ctx.message.delete()
giveaway = GiveawayDB(_id=message.id, channel=responses['channel'].id, name=responses['name'], winners=responses['winners'])
giveaway.save()
await ctx.send(f"Giveaway started!", embed=embed, delete_after=10)
        self.bot.settings.tasks.schedule_end_giveaway(channel_id=responses['channel'].id, message_id=message.id, date=end_time, winners=responses['winners'])
@giveaway.command()
async def reroll(self, ctx, message: discord.Message):
"""Pick a new winner of an already ended giveaway.
Example usage
-------------
!giveaway reroll 795120157679812670
Parameters
----------
message : discord.Message
ID of the giveaway message
"""
g = await self.bot.settings.get_giveaway(id=message.id)
if g is None:
raise commands.BadArgument("Couldn't find an ended giveaway by the provided ID.")
elif not g.is_ended:
raise commands.BadArgument("That giveaway hasn't ended yet!")
elif len(g.entries) == 0:
raise commands.BadArgument(f"There are no entries for the giveaway of **{g.name}**.")
elif len(g.entries) <= len(g.previous_winners):
raise commands.BadArgument("No more winners are possible!")
        the_winner = None
        while the_winner is None:
            random_id = random.choice(g.entries)
            candidate = ctx.guild.get_member(random_id)
            # skip entrants who have left the guild or have already won
            if candidate is not None and candidate.id not in g.previous_winners:
                the_winner = candidate
g.previous_winners.append(the_winner.id)
g.save()
await ctx.message.delete()
channel = ctx.guild.get_channel(g.channel)
await channel.send(f"**Reroll**\nThe new winner of the giveaway of **{g.name}** is {the_winner.mention}! Congratulations!")
@giveaway.command()
async def end(self, ctx, message: discord.Message):
"""End a giveaway early
Example usage
-------------
!giveaway end 795120157679812670
Parameters
----------
message : discord.Message
ID of the giveaway message
"""
        giveaway = await self.bot.settings.get_giveaway(_id=message.id)
if giveaway is None:
raise commands.BadArgument("A giveaway with that ID was not found.")
elif giveaway.is_ended:
raise commands.BadArgument("That giveaway has already ended.")
await ctx.message.delete()
self.bot.settings.tasks.tasks.remove_job(str(message.id + 2), 'default')
await end_giveaway(message.channel.id, message.id, giveaway.winners)
@giveaway.error
@start.error
@end.error
@reroll.error
async def info_error(self, ctx, error):
await ctx.message.delete(delay=5)
if (isinstance(error, commands.MissingRequiredArgument)
or isinstance(error, commands.BadArgument)
or isinstance(error, commands.BadUnionArgument)
or isinstance(error, commands.MissingPermissions)
or isinstance(error, commands.CommandInvokeError)
or isinstance(error, commands.CommandOnCooldown)
or isinstance(error, commands.BotMissingPermissions)
or isinstance(error, commands.MaxConcurrencyReached)
or isinstance(error, commands.NoPrivateMessage)):
await self.bot.send_error(ctx, error)
traceback.print_exc()
else:
await self.bot.send_error(ctx, "A fatal error occured. Tell <@109705860275539968> about this.")
traceback.print_exc()
def setup(bot):
bot.add_cog(Giveaway(bot))
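# A hedged loading sketch (discord.py 1.x extension style; the command prefix
# is a placeholder). load_extension imports this module and calls setup(bot).
if __name__ == '__main__':  # illustration only; a real bot loads this as an extension
    bot = commands.Bot(command_prefix='!')
    bot.load_extension('cogs.commands.misc.giveaway')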
| 37.239316
| 150
| 0.59261
|