Dataset schema (one record per file; ⌀ marks nullable columns):

| column | dtype | range |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 value |
| max_stars_repo_path | string | length 3 to 972 |
| max_stars_repo_name | string | length 6 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 ⌀ | 1 to 191k |
| max_stars_repo_stars_event_min_datetime | string ⌀ | length 24 |
| max_stars_repo_stars_event_max_datetime | string ⌀ | length 24 |
| max_issues_repo_path | string | length 3 to 972 |
| max_issues_repo_name | string | length 6 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 ⌀ | 1 to 116k |
| max_issues_repo_issues_event_min_datetime | string ⌀ | length 24 |
| max_issues_repo_issues_event_max_datetime | string ⌀ | length 24 |
| max_forks_repo_path | string | length 3 to 972 |
| max_forks_repo_name | string | length 6 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 ⌀ | 1 to 105k |
| max_forks_repo_forks_event_min_datetime | string ⌀ | length 24 |
| max_forks_repo_forks_event_max_datetime | string ⌀ | length 24 |
| content | string | length 3 to 1.03M |
| avg_line_length | float64 | 1.13 to 941k |
| max_line_length | int64 | 2 to 941k |
| alphanum_fraction | float64 | 0 to 1 |

Each record below lists these columns in order, pipe-separated, with the file `content` inline between the repository metadata and the per-file statistics.
cf45ac4c7331b5d13f80e9f6d9bdf7fe75c43bf8 | 258 | py | Python | ex015aluguelcarro.py | wtomalves/exerciciopython | 5c239521830cb8d092c7ff8646ff1f38c605509d | ["MIT"] | 1 | 2020-09-04T22:36:21.000Z | 2020-09-04T22:36:21.000Z | ex015aluguelcarro.py | wtomalves/exerciciopython | 5c239521830cb8d092c7ff8646ff1f38c605509d | ["MIT"] | null | null | null | ex015aluguelcarro.py | wtomalves/exerciciopython | 5c239521830cb8d092c7ff8646ff1f38c605509d | ["MIT"] | null | null | null |
dias = float(input('How many days was the car rented? '))
kilometro = float(input('How many kilometers were driven? '))
custos = (dias * 60) + (kilometro * 0.15)
print('The total rental cost for those days and kilometers is R${:.2f}!'.format(custos))
| 28.666667 | 94 | 0.705426 |
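Aside (illustrative only; `rental_cost` is a name introduced here, not part of the file above): the snippet prices a rental at R$60 per day plus R$0.15 per kilometer, which is easier to test as a pure function.

def rental_cost(days: float, km: float) -> float:
    """Cost model from the snippet above: R$60 per day plus R$0.15 per km."""
    return (days * 60) + (km * 0.15)

# 3 days and 200 km -> 180 + 30 = R$210.00
assert round(rental_cost(3, 200), 2) == 210.0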
f3d121bf52eda7abb3f77b3449f1ccbb4760070e | 548 | py | Python | pidal/protocol/mysql/field_type.py | pi-plan/pidal | bfd1b9c4de87bc92565acbcff108270265757e39 | ["BSD-3-Clause"] | 6 | 2021-02-05T04:21:00.000Z | 2021-11-29T06:46:21.000Z | pidal/protocol/mysql/field_type.py | pi-plan/pidal | bfd1b9c4de87bc92565acbcff108270265757e39 | ["BSD-3-Clause"] | 1 | 2021-11-30T06:08:53.000Z | 2021-11-30T06:08:53.000Z | pidal/protocol/mysql/field_type.py | pi-plan/pidal | bfd1b9c4de87bc92565acbcff108270265757e39 | ["BSD-3-Clause"] | null | null | null |
import enum
# note: no @enum.unique here -- CHAR and INTERVAL below are intentional aliases,
# and enum.unique rejects aliased members at class-creation time
class FieldType(enum.IntEnum):
DECIMAL = 0
TINY = 1
SHORT = 2
LONG = 3
FLOAT = 4
DOUBLE = 5
NULL = 6
TIMESTAMP = 7
LONGLONG = 8
INT24 = 9
DATE = 10
TIME = 11
DATETIME = 12
YEAR = 13
NEWDATE = 14
VARCHAR = 15
BIT = 16
JSON = 245
NEWDECIMAL = 246
ENUM = 247
SET = 248
TINY_BLOB = 249
MEDIUM_BLOB = 250
LONG_BLOB = 251
BLOB = 252
VAR_STRING = 253
STRING = 254
GEOMETRY = 255
CHAR = TINY
INTERVAL = ENUM
| 14.810811 | 30 | 0.536496 |
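Aside (illustrative sketch, assuming the pidal package above is importable): the enum maps raw MySQL wire-protocol column-type bytes to names, and the trailing assignments create aliases.

from pidal.protocol.mysql.field_type import FieldType

raw = 253  # column-type byte as it appears in a MySQL protocol packet
assert FieldType(raw) is FieldType.VAR_STRING
# CHAR and INTERVAL are aliases, so they resolve to the canonical members:
assert FieldType.CHAR is FieldType.TINY
assert FieldType.INTERVAL is FieldType.ENUM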
4af4f7ecab2169bd471fdcc2fd7fddae48d99f79 | 1,581 | py | Python | common_pp/completion_video_common.py | qxcv/structuredinference | 9eb6546db9ca2f9d02b8a8155c2d3fc4f0d27ecb | ["MIT"] | 6 | 2017-09-13T10:49:13.000Z | 2022-01-01T10:53:03.000Z | common_pp/completion_video_common.py | qxcv/structuredinference | 9eb6546db9ca2f9d02b8a8155c2d3fc4f0d27ecb | ["MIT"] | null | null | null | common_pp/completion_video_common.py | qxcv/structuredinference | 9eb6546db9ca2f9d02b8a8155c2d3fc4f0d27ecb | ["MIT"] | 2 | 2017-07-31T04:12:30.000Z | 2018-11-09T22:34:36.000Z |
"""Common (dataset-shared) code for making completino videos"""
import re
import os
import numpy as np
from scipy.optimize import fmin
_num_re = re.compile(r'(\d+)')
def load_sorted_paths(frame_dir):
"""Sorts a bunch of paths that have numbers in the filename."""
fns = os.listdir(frame_dir)
everything = []
for fn in fns:
bn = os.path.basename(fn)
# if we get many numbers then there is a bug
num_str, = _num_re.findall(bn)
thing_id = int(num_str)
everything.append((thing_id, os.path.join(frame_dir, fn)))
return [p for i, p in sorted(everything)]
def alignment_constant(rec_x, true_x):
# This almost certainly has a simple analytic solution, but I can't be
# bothered finding it right now. Instead, I'm centring both, scaling until
# they match, then returning alpha and beta required to do the scaling
# for other samples.
# Update: ...unsurprisingly, this doesn't work very well :(
# expect single poses (2*j)
assert true_x.shape == rec_x.shape
assert true_x.shape[0] == 2 and rec_x.shape[0] == 2
assert true_x.ndim == 2 and rec_x.ndim == 2
rec_cen = rec_x - rec_x.mean(axis=1)[:, None]
true_cen = true_x - true_x.mean(axis=1)[:, None]
def objective(a):
return np.sqrt(np.sum((rec_cen * a - true_cen).flatten()**2))
opt_result = fmin(objective, x0=19)
alpha, = opt_result
# to reconstruct: (rec_x - rec_x.mean(axis=1)) * alpha +
# true_x.mean(axis=1)
beta = true_x.mean(axis=1) - alpha * rec_x.mean(axis=1)
return alpha, beta
| 32.9375 | 78 | 0.657179 |
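Aside (illustrative check, assuming the structuredinference repo above is on sys.path; the pose values are made up): alignment_constant recovers a scale alpha and offset beta such that `rec_x * alpha + beta[:, None]` approximates `true_x`.

import numpy as np
from common_pp.completion_video_common import alignment_constant

rec_x = np.array([[0., 1., 2., 3., 4.],
                  [0., 1., 0., 1., 0.]])  # hypothetical 2 x J pose
true_x = 2.0 * rec_x + 3.0                # same pose, scaled and shifted
alpha, beta = alignment_constant(rec_x, true_x)
recon = rec_x * alpha + beta[:, None]
assert np.allclose(recon, true_x, atol=1e-2)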
f6a897aae00b4369774b0acaa4f9ce65a6d54378 | 7,504 | py | Python | evaluate.py | RAYTRAC3R/FastSpeech2 | 268ef2e81afc0280285f72a6c8fadd213128158f | ["MIT"] | 1 | 2020-12-06T03:15:57.000Z | 2020-12-06T03:15:57.000Z | evaluate.py | eric102004/FastSpeech2-1 | 95e0fa7f02b390fa21ee9801cfa297d25163f71d | ["MIT"] | null | null | null | evaluate.py | eric102004/FastSpeech2-1 | 95e0fa7f02b390fa21ee9801cfa297d25163f71d | ["MIT"] | null | null | null |
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
import numpy as np
import os
import argparse
import re
from g2p_en import G2p
from fastspeech2 import FastSpeech2
from loss import FastSpeech2Loss
from dataset import Dataset
from text import text_to_sequence, sequence_to_text
import hparams as hp
import utils
import audio as Audio
import vocoder
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
def get_FastSpeech2(num):
checkpoint_path = os.path.join(hp.checkpoint_path, "checkpoint_{}.pth.tar".format(num))
model = nn.DataParallel(FastSpeech2())
model.load_state_dict(torch.load(checkpoint_path)['model'])
    model.requires_grad_(False)  # freeze parameters (plain attribute assignment has no effect)
model.eval()
return model
def evaluate(model, step, vocoder_model=None):  # renamed to avoid shadowing the vocoder module
torch.manual_seed(0)
# Get dataset
dataset = Dataset("val.txt", sort=False)
loader = DataLoader(dataset, batch_size=hp.batch_size**2, shuffle=False, collate_fn=dataset.collate_fn, drop_last=False, num_workers=0)
# Get loss function
Loss = FastSpeech2Loss().to(device)
# Evaluation
d_l = []
f_l = []
e_l = []
mel_l = []
mel_p_l = []
current_step = 0
idx = 0
for i, batchs in enumerate(loader):
for j, data_of_batch in enumerate(batchs):
# Get Data
if hp.use_spk_embed:
spk_ids = torch.tensor(data_of_batch["spk_ids"]).to(torch.int64).to(device)
else:
spk_ids = None
id_ = data_of_batch["id"]
text = torch.from_numpy(data_of_batch["text"]).long().to(device)
mel_target = torch.from_numpy(data_of_batch["mel_target"]).float().to(device)
D = torch.from_numpy(data_of_batch["D"]).int().to(device)
log_D = torch.from_numpy(data_of_batch["log_D"]).int().to(device)
f0 = torch.from_numpy(data_of_batch["f0"]).float().to(device)
energy = torch.from_numpy(data_of_batch["energy"]).float().to(device)
src_len = torch.from_numpy(data_of_batch["src_len"]).long().to(device)
mel_len = torch.from_numpy(data_of_batch["mel_len"]).long().to(device)
max_src_len = np.max(data_of_batch["src_len"]).astype(np.int32)
max_mel_len = np.max(data_of_batch["mel_len"]).astype(np.int32)
with torch.no_grad():
# Forward
mel_output, mel_postnet_output, log_duration_output, f0_output, energy_output, src_mask, mel_mask, out_mel_len = model(
text, src_len, mel_len, D, f0, energy, max_src_len, max_mel_len, spk_ids)
# Cal Loss
mel_loss, mel_postnet_loss, d_loss, f_loss, e_loss = Loss(
log_duration_output, log_D, f0_output, f0, energy_output, energy, mel_output, mel_postnet_output, mel_target, ~src_mask, ~mel_mask)
d_l.append(d_loss.item())
f_l.append(f_loss.item())
e_l.append(e_loss.item())
mel_l.append(mel_loss.item())
mel_p_l.append(mel_postnet_loss.item())
            if vocoder_model is not None:
# Run vocoding and plotting spectrogram only when the vocoder is defined
for k in range(len(mel_target)):
basename = id_[k]
gt_length = mel_len[k]
out_length = out_mel_len[k]
mel_target_torch = mel_target[k:k+1, :gt_length].transpose(1, 2).detach()
mel_target_ = mel_target[k, :gt_length].cpu().transpose(0, 1).detach()
mel_postnet_torch = mel_postnet_output[k:k+1, :out_length].transpose(1, 2).detach()
mel_postnet = mel_postnet_output[k, :out_length].cpu().transpose(0, 1).detach()
                    if hp.vocoder == 'melgan':
                        vocoder.melgan_infer(mel_target_torch, vocoder_model, os.path.join(hp.eval_path, 'ground-truth_{}_{}.wav'.format(basename, hp.vocoder)))
                        vocoder.melgan_infer(mel_postnet_torch, vocoder_model, os.path.join(hp.eval_path, 'eval_{}_{}.wav'.format(basename, hp.vocoder)))
                    elif hp.vocoder == 'waveglow':
                        vocoder.waveglow_infer(mel_target_torch, vocoder_model, os.path.join(hp.eval_path, 'ground-truth_{}_{}.wav'.format(basename, hp.vocoder)))
                        vocoder.waveglow_infer(mel_postnet_torch, vocoder_model, os.path.join(hp.eval_path, 'eval_{}_{}.wav'.format(basename, hp.vocoder)))
np.save(os.path.join(hp.eval_path, 'eval_{}_mel.npy'.format(basename)), mel_postnet.numpy())
f0_ = f0[k, :gt_length].detach().cpu().numpy()
energy_ = energy[k, :gt_length].detach().cpu().numpy()
f0_output_ = f0_output[k, :out_length].detach().cpu().numpy()
energy_output_ = energy_output[k, :out_length].detach().cpu().numpy()
utils.plot_data([(mel_postnet.numpy(), f0_output_, energy_output_), (mel_target_.numpy(), f0_, energy_)],
['Synthesized Spectrogram', 'Ground-Truth Spectrogram'], filename=os.path.join(hp.eval_path, 'eval_{}.png'.format(basename)))
idx += 1
current_step += 1
d_l = sum(d_l) / len(d_l)
f_l = sum(f_l) / len(f_l)
e_l = sum(e_l) / len(e_l)
mel_l = sum(mel_l) / len(mel_l)
mel_p_l = sum(mel_p_l) / len(mel_p_l)
str1 = "FastSpeech2 Step {},".format(step)
str2 = "Duration Loss: {}".format(d_l)
str3 = "F0 Loss: {}".format(f_l)
str4 = "Energy Loss: {}".format(e_l)
str5 = "Mel Loss: {}".format(mel_l)
str6 = "Mel Postnet Loss: {}".format(mel_p_l)
print("\n" + str1)
print(str2)
print(str3)
print(str4)
print(str5)
print(str6)
with open(os.path.join(hp.log_path, "eval.txt"), "a") as f_log:
f_log.write(str1 + "\n")
f_log.write(str2 + "\n")
f_log.write(str3 + "\n")
f_log.write(str4 + "\n")
f_log.write(str5 + "\n")
f_log.write(str6 + "\n")
f_log.write("\n")
return d_l, f_l, e_l, mel_l, mel_p_l
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--step', type=int, default=30000)
args = parser.parse_args()
# Get model
model = get_FastSpeech2(args.step).to(device)
print("Model Has Been Defined")
num_param = utils.get_param_num(model)
print('Number of FastSpeech2 Parameters:', num_param)
# Load vocoder
    if hp.vocoder == 'melgan':
        vocoder_model = vocoder.get_melgan()
    elif hp.vocoder == 'waveglow':
        vocoder_model = vocoder.get_waveglow()
    vocoder_model.to(device)
# Init directories
if not os.path.exists(hp.log_path):
os.makedirs(hp.log_path)
if not os.path.exists(hp.eval_path):
os.makedirs(hp.eval_path)
    evaluate(model, args.step, vocoder_model)
| 43.627907 | 161 | 0.5617 |
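Aside (illustrative, not part of the row; `strip_dataparallel_prefix` is a name introduced here): get_FastSpeech2 above wraps the network in nn.DataParallel before calling load_state_dict because the checkpoint keys carry the "module." prefix that DataParallel adds. A minimal sketch of the equivalent way to load such a checkpoint into a bare module:

def strip_dataparallel_prefix(state_dict):
    """Remove the 'module.' key prefix that torch.nn.DataParallel adds."""
    return {k[len("module."):] if k.startswith("module.") else k: v
            for k, v in state_dict.items()}

# usage sketch: model.load_state_dict(strip_dataparallel_prefix(checkpoint["model"]))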
74b19d9298d0139cb1dcb941922ffdd2adeb0792 | 9,211 | py | Python | Request.py | aryaman4/EasyWeekScheduler | 298aed983009e9dcf308ffef779372737fad036a | ["Apache-2.0"] | 1 | 2019-02-07T17:55:15.000Z | 2019-02-07T17:55:15.000Z | Request.py | aryaman4/EasyWeekScheduler | 298aed983009e9dcf308ffef779372737fad036a | ["Apache-2.0"] | null | null | null | Request.py | aryaman4/EasyWeekScheduler | 298aed983009e9dcf308ffef779372737fad036a | ["Apache-2.0"] | null | null | null |
from uiucapi.query import *
from requests_xml import XMLSession
from URLBuilder import *
session = XMLSession()
class Request(object):
def __init__(self, in_year, in_semester):
self.subjects = list()
self.course_list = list()
self.year = in_year
self.semester = in_semester
self.subject_code = ""
self.course_num = ""
self.u = URLBuilder(self.year, self.semester)
def set_subject_code(self, in_subject):
"""
method to set subject code
:param in_subject: input subject
"""
self.subject_code = in_subject
self.u.set_subject_code(self.subject_code)
def set_course_num(self, in_course_num):
"""
method to set particular course
:param in_course_num: input course number
"""
self.course_num = in_course_num
self.u.set_course_num(self.course_num)
def build_course_str(self):
course_str = self.subject_code + ' ' + self.course_num + ' '
if self.semester == 'fall':
course_str += 'FA'
elif self.semester == 'spring':
course_str += 'SP'
course_str += self.year[2:]
return course_str
def get_subjects(self):
"""
method to build a list of all subjects
"""
for i in range(1, 200):
s = '//subject[%s]/@id' % (str(i))
r = session.get(self.u.get_url())
item = r.xml.xpath(s, first=True)
if item is None:
break
self.subjects.append(item)
def get_courses(self):
"""
method to set all courses
"""
r = session.get(self.u.get_url())
for i in range(1, 200):
xp = '//course[%s]/@id' % (str(i))
l = r.xml.xpath(xp, first=True)
if l is None:
break
self.course_list.append(l)
def course_description(self):
"""
method to get course description
:return: description of the course
"""
r = session.get(self.u.get_url())
desc = r.xml.xpath("//description/text()", first=True)
return desc
def get_prereq(self):
"""
get all prerequisites for a given course
:return: prerequisites of the given course
"""
s = self.course_description()
temp = "Prerequisite:"
index = s.find(temp)
s = s[index: len(s)]
s = s[len(temp):]
parts = s.split(";")
parts = [part.strip() for part in parts]
pre_reqs = [list() for _ in parts]
for j in range(len(parts)):
s = parts[j]
i = len(s) - 1
while i >= 0:
if s[i].isdigit() or s[i] == " " or s[i].isupper():
end = i + 1
while i >= 0 and (s[i].isdigit() or s[i] == " " or s[i].isupper()):
i -= 1
i += 1
pre_reqs[j].append(s[i:end])
i -= 1
for i in range(len(pre_reqs)):
temp = []
for option in pre_reqs[i]:
option = option.strip()
if len(option) > 1:
temp.append(option)
pre_reqs[i] = temp
return pre_reqs
def get_open_sections(self):
"""
get a list of all open sections for the course
:return: a list of all open sections for the course
"""
course_str = self.build_course_str()
course = get_course(course_str)
open_sections = []
lec_r = False
labs_r = False
disc_r = False
        types = self.get_types()
        if 'Lecture' in types or 'Lecture-Discussion' in types:
            lec_r = True
        if 'Discussion' in types or 'Lecture-Discussion' in types or 'Discussion/Recitation' in types:
            disc_r = True
        if 'Laboratory' in types or 'Laboratory-Discussion' in types:
            labs_r = True
for section in course.sections:
if "open" in section.registration_status.lower():
open_sections.append(tuple((section.section_number, section.crn)))
lec_open = False
disc_open = False
labs_open = False
for sec in open_sections:
x, y = sec
if 'lecture' in self.get_section_type(y).lower() or not lec_r:
lec_open = True
if 'discussion' in self.get_section_type(y).lower() or not disc_r:
disc_open = True
if 'lab' in self.get_section_type(y).lower() or not labs_r:
labs_open = True
if lec_open and disc_open and labs_open:
return open_sections
else:
raise Exception("Course not available")
def get_credit_hours(self):
"""
method to get number of credit hours the course is
:return: the number of credit hours
"""
r = session.get(self.u.get_url())
credit_hours = r.xml.xpath("//creditHours/text()", first=True)
return int(credit_hours)
def get_no_lectures(self):
"""
method to get number of lecture sections for the course
:return: the number of open lecture sections
"""
sections = self.get_open_sections()
count_lec = 0
for name, crn in sections:
            if 'lecture' in self.get_section_type(crn).lower():
count_lec += 1
return count_lec
def get_no_disc(self):
"""
method to get number of discussion sections for the course
:return: the number of open discussion sections
"""
sections = self.get_open_sections()
count_disc = 0
for name, crn in sections:
            if 'discussion' in self.get_section_type(crn).lower():
count_disc += 1
return count_disc
def get_no_lab(self):
"""
method to get number of lab sections for the course
:return: the number of open lab sections
"""
        sections = self.get_open_sections()
        count_lab = 0
        for name, crn in sections:
            if 'lab' in self.get_section_type(crn).lower():
                count_lab += 1
        return count_lab
def get_time(self, section_str):
"""
method to get the starting and ending times
:param section_str: the CRN of the section
:return: the starting and ending times of the section
"""
r = session.get(self.u.url + str(section_str) + ".xml")
start_time = r.xml.xpath("//meetings/meeting/start/text()", first=True)
end_time = r.xml.xpath("//meetings/meeting/end/text()", first=True)
s = ""
e = ""
if start_time[6] == 'P':
s += str(12 + int(start_time[1]))
e += str(12 + int(end_time[1]))
else:
s += start_time[0]
e += end_time[0]
s += start_time[1]
e += end_time[1]
s += start_time[2:5]
e += end_time[2:5]
classes_on_day = self.get_days(section_str).strip()
return tuple((s.strip(), e.strip(), classes_on_day))
def get_location(self, section_str):
"""
method to get the location
:param section_str: the CRN of the section
:return: the room number and building of the section
"""
r = session.get(self.u.url + str(section_str) + ".xml")
building = r.xml.xpath("//meetings/meeting/buildingName/text()", first=True)
room = r.xml.xpath("//meetings/meeting/roomNumber/text()", first=True)
return room + ' ' + building
def get_instructor(self, section_str):
"""
method to get the name of the instructor
:param section_str: the CRN of the section
:return: the name of the instructor
"""
r = session.get(self.u.url + str(section_str) + ".xml")
instructor = r.xml.xpath("//meetings/meeting/instructor/text()", first=True)
return instructor
def get_days(self, section_str):
"""
method to get days of the week
:param section_str: the CRN of the section
:return: the days of the week on which there is class
"""
r = session.get(self.u.url + str(section_str) + ".xml")
days = r.xml.xpath("//meetings/meeting/daysOfTheWeek/text()", first=True)
return days
def get_section_type(self, section_str):
"""
method to get type of the section
:param section_str: the CRN of the section
:return: the type of the section
"""
r = session.get(self.u.url + str(section_str) + ".xml")
        section_type = r.xml.xpath("//meetings/meeting/type/text()", first=True)
        return section_type
def get_types(self):
"""
method to return all required classes
:return:
"""
types = []
r = session.get(self.u.get_url())
for i in range(1, 200):
crn = '//section[%s]/@id' % (str(i))
item = r.xml.xpath(crn, first=True)
if item is None:
break
types.append(self.get_section_type(item))
return types
| 33.739927 | 97 | 0.546412 |
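Aside (illustrative): the membership tests in get_open_sections were rewritten above because `('Lecture' or 'Lecture-Discussion')` is an or-chain that evaluates to its first truthy operand, so only 'Lecture' was ever checked. A self-contained demonstration:

types = ['Lecture-Discussion']
assert ('Lecture' or 'Lecture-Discussion') == 'Lecture'            # or-chain collapses
assert ('Lecture' or 'Lecture-Discussion') not in types            # the old test misses
assert any(t in types for t in ('Lecture', 'Lecture-Discussion'))  # the fixed test hits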
0271f36e11cf1a431a5d4b5796292f94a00de1ad | 8,446 | py | Python | pyro/contrib/gp/kernels/kernel.py | nipunbatra/pyro | d5b0545c4f992d435692080db6969314a2c32f05 | ["Apache-2.0"] | 4,959 | 2017-11-03T14:39:17.000Z | 2019-02-04T16:14:30.000Z | pyro/contrib/gp/kernels/kernel.py | nipunbatra/pyro | d5b0545c4f992d435692080db6969314a2c32f05 | ["Apache-2.0"] | 985 | 2017-11-03T14:27:56.000Z | 2019-02-02T18:52:54.000Z | pyro/contrib/gp/kernels/kernel.py | nipunbatra/pyro | d5b0545c4f992d435692080db6969314a2c32f05 | ["Apache-2.0"] | 564 | 2017-11-03T15:05:55.000Z | 2019-01-31T14:02:29.000Z |
# Copyright (c) 2017-2019 Uber Technologies, Inc.
# SPDX-License-Identifier: Apache-2.0
import numbers
from pyro.contrib.gp.parameterized import Parameterized
class Kernel(Parameterized):
"""
Base class for kernels used in this Gaussian Process module.
Every inherited class should implement a :meth:`forward` pass which takes inputs
:math:`X`, :math:`Z` and returns their covariance matrix.
To construct a new kernel from the old ones, we can use methods :meth:`add`,
:meth:`mul`, :meth:`exp`, :meth:`warp`, :meth:`vertical_scale`.
References:
[1] `Gaussian Processes for Machine Learning`,
Carl E. Rasmussen, Christopher K. I. Williams
:param int input_dim: Number of feature dimensions of inputs.
:param torch.Tensor variance: Variance parameter of this kernel.
:param list active_dims: List of feature dimensions of the input which the kernel
acts on.
"""
def __init__(self, input_dim, active_dims=None):
super().__init__()
if active_dims is None:
active_dims = list(range(input_dim))
elif input_dim != len(active_dims):
raise ValueError(
"Input size and the length of active dimensionals should be equal."
)
self.input_dim = input_dim
self.active_dims = active_dims
def forward(self, X, Z=None, diag=False):
r"""
Calculates covariance matrix of inputs on active dimensionals.
:param torch.Tensor X: A 2D tensor with shape :math:`N \times input\_dim`.
:param torch.Tensor Z: An (optional) 2D tensor with shape
:math:`M \times input\_dim`.
:param bool diag: A flag to decide if we want to return full covariance matrix
or just its diagonal part.
:returns: covariance matrix of :math:`X` and :math:`Z` with shape
:math:`N \times M`
:rtype: torch.Tensor
"""
raise NotImplementedError
def _slice_input(self, X):
r"""
Slices :math:`X` according to ``self.active_dims``. If ``X`` is 1D then returns
a 2D tensor with shape :math:`N \times 1`.
:param torch.Tensor X: A 1D or 2D input tensor.
:returns: a 2D slice of :math:`X`
:rtype: torch.Tensor
"""
if X.dim() == 2:
return X[:, self.active_dims]
elif X.dim() == 1:
return X.unsqueeze(1)
else:
raise ValueError("Input X must be either 1 or 2 dimensional.")
class Combination(Kernel):
"""
Base class for kernels derived from a combination of kernels.
:param Kernel kern0: First kernel to combine.
:param kern1: Second kernel to combine.
:type kern1: Kernel or numbers.Number
"""
def __init__(self, kern0, kern1):
if not isinstance(kern0, Kernel):
raise TypeError(
"The first component of a combined kernel must be a " "Kernel instance."
)
if not (isinstance(kern1, Kernel) or isinstance(kern1, numbers.Number)):
raise TypeError(
"The second component of a combined kernel must be a "
"Kernel instance or a number."
)
active_dims = set(kern0.active_dims)
if isinstance(kern1, Kernel):
active_dims |= set(kern1.active_dims)
active_dims = sorted(active_dims)
input_dim = len(active_dims)
super().__init__(input_dim, active_dims)
self.kern0 = kern0
self.kern1 = kern1
class Sum(Combination):
"""
Returns a new kernel which acts like a sum/direct sum of two kernels.
The second kernel can be a constant.
"""
def forward(self, X, Z=None, diag=False):
if isinstance(self.kern1, Kernel):
return self.kern0(X, Z, diag=diag) + self.kern1(X, Z, diag=diag)
else: # constant
return self.kern0(X, Z, diag=diag) + self.kern1
class Product(Combination):
"""
Returns a new kernel which acts like a product/tensor product of two kernels.
The second kernel can be a constant.
"""
def forward(self, X, Z=None, diag=False):
if isinstance(self.kern1, Kernel):
return self.kern0(X, Z, diag=diag) * self.kern1(X, Z, diag=diag)
else: # constant
return self.kern0(X, Z, diag=diag) * self.kern1
class Transforming(Kernel):
"""
Base class for kernels derived from a kernel by some transforms such as warping,
exponent, vertical scaling.
:param Kernel kern: The original kernel.
"""
def __init__(self, kern):
super().__init__(kern.input_dim, kern.active_dims)
self.kern = kern
class Exponent(Transforming):
r"""
Creates a new kernel according to
:math:`k_{new}(x, z) = \exp(k(x, z)).`
"""
def forward(self, X, Z=None, diag=False):
return self.kern(X, Z, diag=diag).exp()
class VerticalScaling(Transforming):
"""
Creates a new kernel according to
:math:`k_{new}(x, z) = f(x)k(x, z)f(z),`
where :math:`f` is a function.
:param callable vscaling_fn: A vertical scaling function :math:`f`.
"""
def __init__(self, kern, vscaling_fn):
super().__init__(kern)
self.vscaling_fn = vscaling_fn
def forward(self, X, Z=None, diag=False):
if diag:
return (
self.vscaling_fn(X) * self.kern(X, Z, diag=diag) * self.vscaling_fn(X)
)
elif Z is None:
vscaled_X = self.vscaling_fn(X).unsqueeze(1)
return vscaled_X * self.kern(X, Z, diag=diag) * vscaled_X.t()
else:
return (
self.vscaling_fn(X).unsqueeze(1)
* self.kern(X, Z, diag=diag)
* self.vscaling_fn(Z).unsqueeze(0)
)
def _Horner_evaluate(x, coef):
"""
Evaluates the value of a polynomial according to Horner's method.
"""
# https://en.wikipedia.org/wiki/Horner%27s_method
n = len(coef) - 1
b = coef[n]
for i in range(n - 1, -1, -1):
b = coef[i] + b * x
return b
class Warping(Transforming):
"""
Creates a new kernel according to
:math:`k_{new}(x, z) = q(k(f(x), f(z))),`
    where :math:`f` is a function and :math:`q` is a polynomial with non-negative
coefficients ``owarping_coef``.
We can take advantage of :math:`f` to combine a Gaussian Process kernel with a deep
learning architecture. For example:
>>> linear = torch.nn.Linear(10, 3)
>>> # register its parameters to Pyro's ParamStore and wrap it by lambda
>>> # to call the primitive pyro.module each time we use the linear function
>>> pyro_linear_fn = lambda x: pyro.module("linear", linear)(x)
>>> kernel = gp.kernels.Matern52(input_dim=3, lengthscale=torch.ones(3))
>>> warped_kernel = gp.kernels.Warping(kernel, pyro_linear_fn)
Reference:
[1] `Deep Kernel Learning`,
Andrew G. Wilson, Zhiting Hu, Ruslan Salakhutdinov, Eric P. Xing
:param callable iwarping_fn: An input warping function :math:`f`.
:param list owarping_coef: A list of coefficients of the output warping polynomial.
These coefficients must be non-negative.
"""
def __init__(self, kern, iwarping_fn=None, owarping_coef=None):
super().__init__(kern)
self.iwarping_fn = iwarping_fn
if owarping_coef is not None:
            for coef in owarping_coef:
                if not isinstance(coef, int) or coef < 0:
                    raise ValueError(
                        "Coefficients of the polynomial must be "
                        "non-negative integers."
                    )
if len(owarping_coef) < 2 and sum(owarping_coef) == 0:
raise ValueError(
"The ouput warping polynomial should have a degree "
"of at least 1."
)
self.owarping_coef = owarping_coef
def forward(self, X, Z=None, diag=False):
if self.iwarping_fn is None:
K_iwarp = self.kern(X, Z, diag=diag)
elif Z is None:
K_iwarp = self.kern(self.iwarping_fn(X), None, diag=diag)
else:
K_iwarp = self.kern(self.iwarping_fn(X), self.iwarping_fn(Z), diag=diag)
if self.owarping_coef is None:
return K_iwarp
else:
return _Horner_evaluate(K_iwarp, self.owarping_coef)
| 32.484615 | 88 | 0.604547 |
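Aside (illustrative usage sketch, assuming pyro and torch are installed; the kernel choices are arbitrary): composing covariance functions with the Sum and Product classes above.

import torch
import pyro.contrib.gp as gp

k_rbf = gp.kernels.RBF(input_dim=2)
k_lin = gp.kernels.Linear(input_dim=2)
k = gp.kernels.Sum(k_rbf, gp.kernels.Product(k_lin, 0.5))  # rbf + 0.5 * linear

X = torch.randn(5, 2)
K = k(X)                  # full 5 x 5 covariance matrix
K_diag = k(X, diag=True)  # only the diagonal, shape (5,)
assert K.shape == (5, 5) and K_diag.shape == (5,)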
080ccc7565081863763748f4e9928f2fcd1a1ed7 | 9,390 | py | Python | project/server/models.py | lstrgiang/thesis-server | 08c4293d104c3581c4bf166086c52eadb62adb4f | ["MIT"] | null | null | null | project/server/models.py | lstrgiang/thesis-server | 08c4293d104c3581c4bf166086c52eadb62adb4f | ["MIT"] | null | null | null | project/server/models.py | lstrgiang/thesis-server | 08c4293d104c3581c4bf166086c52eadb62adb4f | ["MIT"] | null | null | null |
import datetime
import jwt
from sqlalchemy import and_
from project.server import app, db, bcrypt
from Crypto.PublicKey import RSA
class User(db.Model):
"""
User Model for storing user related details
"""
__tablename__ = "user"
id = db.Column(db.Integer, primary_key = True, autoincrement = True)
email = db.Column(db.String(), unique=True, nullable=False)
password = db.Column(db.String(), nullable=False)
bday = db.Column(db.DateTime(), nullable=False)
fullname = db.Column(db.String(), nullable=False)
job = db.Column(db.String(), nullable=True)
country = db.Column(db.String(), nullable=True)
registered_on = db.Column(db.DateTime, nullable=False)
is_confirmed= db.Column(db.Boolean, nullable=True, default=False)
device_list = db.relationship('DeviceList', backref='user',lazy='dynamic')
def __init__(self, email, password, bday, fullname,
job=None, country=None):
self.email = email
self.password = bcrypt.generate_password_hash(
password, app.config.get('BCRYPT_LOG_ROUNDS')
).decode()
self.registered_on = datetime.datetime.now()
self.bday=bday
self.fullname=fullname
self.country=country
self.job=job
@staticmethod
def get_user_by_email(user_email):
return User.query.filter_by(email=user_email).first()
@staticmethod
def get_user_by_id(user_id):
return User.query.filter_by(id=user_id).first()
@staticmethod
def encode_auth_token(user_id, modulus=None, exponent=None, main_key=None, hasRoot=False):
"""
Generates the Auth Token
        :param user_id: id of the user the token is issued for
:return: string
"""
try:
if main_key:
hasRoot = True
payload = {
'exp': datetime.datetime.utcnow() + datetime.timedelta(days=1),
'iat': datetime.datetime.utcnow(),
'sub': user_id,
'modulus': modulus if modulus else "No modulus is available",
'exponent': exponent if exponent else "No exponent is available",
'key': main_key if main_key else "No main key is available",
'hasRoot': hasRoot
}
return jwt.encode(
payload,
app.config.get('SECRET_KEY'),
)
except Exception as e:
return e
@staticmethod
def decode_public_key(auth_token):
try:
payload = jwt.decode(auth_token, app.config.get('SECRET_KEY'))
return payload['modulus'], payload['exponent']
except:
return 'Invalid auth token'
@staticmethod
def decode_auth_token_key(auth_token):
try:
payload = jwt.decode(auth_token, app.config.get('SECRET_KEY'))
is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)
if is_blacklisted_token:
return 'Token is blacklisted. Please login again.'
else:
key = [payload['modulus'], payload['exponent']]
return payload['sub'], key
except jwt.ExpiredSignatureError:
return 'Signature expired. Please login again.'
except jwt.InvalidTokenError:
return 'Invalid token. Please login again.'
@staticmethod
def decode_auth_token(auth_token):
"""
Decodes the authentication token
:param auth_token:
:return: integer|string
"""
try:
payload = jwt.decode(auth_token, app.config.get('SECRET_KEY'))
is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)
if is_blacklisted_token:
return 'Token is blacklisted. Please login again.'
else:
return payload['sub']
except jwt.ExpiredSignatureError:
return 'Signature expired. Please login again.'
except jwt.InvalidTokenError:
return 'Invalid token. Please login again.'
@staticmethod
def decode_auth_key(auth_token):
try:
payload = jwt.decode(auth_token, app.config.get('SECRET_KEY'))
return payload['modulus'], payload['exponent']
except jwt.ExpiredSignatureError:
return 'Signature expired. Please login again.'
except jwt.InvalidTokenError:
return 'Invalid token. Please login again.'
class DeviceList(db.Model):
"""
    Model for storing the list of devices associated with a user
"""
__tablename__='device_list'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
registered_on = db.Column(db.DateTime, nullable=False)
mac_address = db.Column(db.String(), nullable=False)
os = db.Column(db.String(), nullable=False)
root= db.Column(db.Boolean, nullable=True, default=False)
main_key = db.Column(db.String(), nullable=False)
backup_key = db.Column(db.String(), nullable=False)
otp_modulus = db.Column(db.String(), nullable=False)
otp_exponent = db.Column(db.Integer, nullable=False)
encrypted_key = db.Column(db.String(), nullable=True)
def serialize(self):
return {
'id': self.id,
'mac_address': self.mac_address,
'os': self.os,
'registered_on': self.registered_on
}
def __init__(self, user, mac_address, backup_key,
main_key, otp_modulus, otp_exponent,os="Unknown",is_root=False):
self.user = user
self.backup_key =backup_key
self.registered_on = datetime.datetime.now()
self.main_key = main_key
self.otp_modulus=otp_modulus
self.otp_exponent=otp_exponent
self.mac_address=mac_address
self.os=os
self.root=is_root
self.encrypted_key = None
@staticmethod
def get_root_device(user_id):
return DeviceList.query.filter(and_(DeviceList.user.has(id=user_id),DeviceList.root==True)).first()
@staticmethod
def get_device_by_user_id_and_mac(user_id,mac):
return DeviceList.query.filter(and_(DeviceList.user.has(id=user_id),DeviceList.mac_address==mac)).first()
@staticmethod
def get_device_by_mac(mac):
return DeviceList.query.filter_by(mac_address=mac).first()
@staticmethod
def get_device_by_user_id(user_id):
return DeviceList.query.filter(DeviceList.user.has(id=user_id))
@staticmethod
def is_root(mac):
return DeviceList.get_device_by_mac(mac).root
class RSAPair(db.Model):
"""
    RSAPair model for database mapping to create the RSAPair table,
    which stores the RSA key pairs generated for each login session
"""
__tablename__= 'rsa_key'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
public_modulus= db.Column(db.String(), nullable=False)
public_exponent= db.Column(db.Integer, nullable=False)
private_exponent = db.Column(db.String(), nullable=False)
def __init__(self,public_modulus, public_exponent, private_exponent):
"""
RSAPair Model Constructor
:params:
            :public_modulus: public modulus
            :public_exponent: public exponent
            :private_exponent: private exponent
:returns: void
"""
self.public_modulus = public_modulus
self.public_exponent = public_exponent
self.private_exponent = private_exponent
@staticmethod
def is_existed(key):
"""
        Check whether the provided key already exists
        :params: :key: string, list, or RSA instance of the key
:returns: True or False
"""
if isinstance(key,str):
rsa_key = RSAPair.query.filter_by(public_modulus=key).first()
elif isinstance(key,list):
rsa_key = RSAPair.query.filter_by(public_modulus=key[0]).first()
else:
rsa_key = RSAPair.query.filter_by(public_modulus=str(key.n)).first()
        return bool(rsa_key)
@staticmethod
def get_RSA_by_public(public_key):
"""
Get stored RSAPair from the public key
:params: :public_key: the corresponding public key
:returns: :RSAPair:
"""
if isinstance(public_key, list):
return RSAPair.query.filter_by(public_modulus=public_key[0]).first()
elif isinstance(public_key, str):
return RSAPair.query.filter_by(public_modulus=public_key).first()
elif isinstance(public_key, int):
return RSAPair.query.filter_by(public_modulus=str(public_key)).first()
else:
return RSAPair.query.filter_by(public_modulus=str(public_key.n)).first()
class BlacklistToken(db.Model):
"""
Token Model for storing JWT tokens
"""
__tablename__ = 'blacklist_tokens'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
token = db.Column(db.String(1024), unique=True, nullable=False)
blacklisted_on = db.Column(db.DateTime, nullable=False)
def __init__(self, token):
self.token = token
self.blacklisted_on = datetime.datetime.now()
def __repr__(self):
        return '<id: token: {}>'.format(self.token)
@staticmethod
def check_blacklist(auth_token):
result = BlacklistToken.query.filter_by(token=str(auth_token)).first()
        return bool(result)
| 38.801653 | 113 | 0.640788 |
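Aside (illustrative; the secret and payload values are made up): a minimal PyJWT round-trip of the encode/decode pattern the User model above relies on.

import datetime
import jwt

SECRET = "example-secret"
payload = {
    'exp': datetime.datetime.utcnow() + datetime.timedelta(days=1),
    'iat': datetime.datetime.utcnow(),
    'sub': 42,
}
token = jwt.encode(payload, SECRET, algorithm="HS256")
decoded = jwt.decode(token, SECRET, algorithms=["HS256"])
assert decoded['sub'] == 42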
db69f2363f0b7af1ec32e62f2ec892c95f8dec78 | 3,002 | py | Python | Code/odooerp/odoo-8.0/openerp/addons/stock/tests/test_owner_available.py | zhupangithub/WEBERP | 714512082ec5c6db07cbf6af0238ceefe2d2c1a5 | ["MIT"] | 1 | 2019-12-29T11:53:56.000Z | 2019-12-29T11:53:56.000Z | odoo/addons/stock/tests/test_owner_available.py | tuanquanghpvn/odoo8-tutorial | 52d25f1ca5f233c431cb9d3b24b79c3b4fb5127e | ["MIT"] | null | null | null | odoo/addons/stock/tests/test_owner_available.py | tuanquanghpvn/odoo8-tutorial | 52d25f1ca5f233c431cb9d3b24b79c3b4fb5127e | ["MIT"] | 3 | 2020-10-08T14:42:10.000Z | 2022-01-28T14:12:29.000Z |
# Author: Leonardo Pistone
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openerp.addons.stock.tests.common import TestStockCommon
class TestVirtualAvailable(TestStockCommon):
def setUp(self):
super(TestVirtualAvailable, self).setUp()
self.env['stock.quant'].create({
'product_id': self.productA.id,
'location_id': self.stock_location,
'qty': 30.0,
})
self.env['stock.quant'].create({
'product_id': self.productA.id,
'location_id': self.stock_location,
'qty': 10.0,
'owner_id': self.ref('base.res_partner_4'),
})
self.picking_out = self.env['stock.picking'].create({
'picking_type_id': self.ref('stock.picking_type_out')})
self.env['stock.move'].create({
'name': 'a move',
'product_id': self.productA.id,
'product_uom_qty': 3.0,
'product_uom': self.productA.uom_id.id,
'picking_id': self.picking_out.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
self.picking_out_2 = self.env['stock.picking'].create({
'picking_type_id': self.ref('stock.picking_type_out')})
self.env['stock.move'].create({
'restrict_partner_id': self.ref('base.res_partner_4'),
'name': 'another move',
'product_id': self.productA.id,
'product_uom_qty': 5.0,
'product_uom': self.productA.uom_id.id,
'picking_id': self.picking_out_2.id,
'location_id': self.stock_location,
'location_dest_id': self.customer_location})
def test_without_owner(self):
self.assertAlmostEqual(40.0, self.productA.virtual_available)
self.picking_out.action_assign()
self.picking_out_2.action_assign()
self.assertAlmostEqual(32.0, self.productA.virtual_available)
def test_with_owner(self):
prod_context = self.productA.with_context(
owner_id=self.ref('base.res_partner_4')
)
self.assertAlmostEqual(10.0, prod_context.virtual_available)
self.picking_out.action_assign()
self.picking_out_2.action_assign()
self.assertAlmostEqual(5.0, prod_context.virtual_available)
| 39.5 | 77 | 0.645903 |
aeee066f779b64113e3c5e8e3c7a5613db68b86c | 1,114 | py | Python | preprocessor/python/Z_common/process.py | SpiritFlag/FM0-Decoder | 0b2f33309b8353fc9a820e1b9f56aa3c9cb85ebf | ["MIT"] | 2 | 2019-10-04T06:15:22.000Z | 2021-02-02T06:52:48.000Z | preprocessor/python/Z_common/process.py | SpiritFlag/FM0-Decoder | 0b2f33309b8353fc9a820e1b9f56aa3c9cb85ebf | ["MIT"] | null | null | null | preprocessor/python/Z_common/process.py | SpiritFlag/FM0-Decoder | 0b2f33309b8353fc9a820e1b9f56aa3c9cb85ebf | ["MIT"] | 1 | 2019-12-11T05:17:45.000Z | 2019-12-11T05:17:45.000Z |
import sys
import os
import timeit
from global_vars import *
def common_process(file, fnc, isFile):
try:
        if file is not None and os.path.exists(file):
select = input("FILE is already exist. Press 'Y' if you want to rewrite. ")
if select != 'Y':
print("Execution aborted..")
return
tot_time = timeit.default_timer()
if isFile is True:
for file_name in file_name_list:
try:
time = timeit.default_timer()
print("\n\n\t*** " + file_name + " ***")
fnc(file_name)
print("\n\t\tEXECUTION TIME= " + str(round(timeit.default_timer() - time, 3)) + " (sec)\n")
except Exception as ex:
_, _, tb = sys.exc_info()
print("[common_process:" + file_name + ":" + str(tb.tb_lineno) + "] " + str(ex) + "\n\n")
else:
fnc()
print("\t\tTOTAL EXECUTION TIME= " + str(round(timeit.default_timer() - tot_time, 3)) + " (sec)\n")
except Exception as ex:
_, _, tb = sys.exc_info()
print("[common_process:" + str(tb.tb_lineno) + "] " + str(ex) + "\n\n")
| 29.315789 | 104 | 0.548474 |
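Aside (illustrative): the timing pattern common_process applies around each file, in isolation; the workload here is a stand-in.

import timeit

time = timeit.default_timer()
total = sum(range(1_000_000))  # stand-in workload
print("\t\tEXECUTION TIME= " + str(round(timeit.default_timer() - time, 3)) + " (sec)")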
2dfcca5a1cf8a4247f615336de40c49a56c963df | 2,105 | py | Python | tests/utils/test_inference.py | wconnell/pytorch-metric-learning | 1affee7c77bb5d6d4ee559bad62b910a21b39d48 | ["MIT"] | 1 | 2021-05-30T14:59:42.000Z | 2021-05-30T14:59:42.000Z | tests/utils/test_inference.py | umitkacar/pytorch-metric-learning | bf2b7675b7b80e5762b75428d51e4ab0a861e710 | ["MIT"] | null | null | null | tests/utils/test_inference.py | umitkacar/pytorch-metric-learning | bf2b7675b7b80e5762b75428d51e4ab0a861e710 | ["MIT"] | null | null | null |
import unittest
from .. import TEST_DTYPES
import torch
import torchvision
from torchvision import datasets, transforms
from pytorch_metric_learning.utils.inference import InferenceModel
from pytorch_metric_learning.utils import common_functions
class TestInference(unittest.TestCase):
@classmethod
def setUpClass(cls):
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
trunk = torchvision.models.resnet18(pretrained=True)
cls.emb_dim = trunk.fc.in_features
trunk.fc = common_functions.Identity()
trunk = torch.nn.DataParallel(trunk.to(device))
cls.model = trunk
transform = transforms.Compose([transforms.Resize(64),
transforms.ToTensor(),
transforms.Normalize(
mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
])
cls.dataset = datasets.FakeData(size=200, image_size=(3, 64, 64),
transform=transform)
def test_untrained_indexer(self):
inference_model = InferenceModel(trunk=self.model)
with self.assertRaises(RuntimeError):
inference_model.get_nearest_neighbors(self.dataset[0][0], k=10)
def test_get_nearest_neighbors(self):
inference_model = InferenceModel(trunk=self.model)
train_vectors = [self.dataset[i][0] for i in range(len(self.dataset))]
inference_model.train_indexer(train_vectors, self.emb_dim)
self.assertTrue(inference_model.indexer.index.is_trained)
indices, distances = inference_model.get_nearest_neighbors([train_vectors[0]], k=10)
# The closest image is the query itself
self.assertTrue(indices[0][0] == 0)
self.assertTrue(len(indices) == 1)
self.assertTrue(len(distances) == 1)
self.assertTrue(len(indices[0]) == 10)
self.assertTrue(len(distances[0]) == 10)
self.assertTrue((indices != -1).any())
| 38.981481 | 92 | 0.619002 |
c91ba5c33bd577c7f29bc1fb34ab7a5ab98e40db | 600 | py | Python | users/apps.py | Exaphis/Synchronous | 17334442ee3d54d6704469bba8a53eb35c1f555e | ["MIT"] | null | null | null | users/apps.py | Exaphis/Synchronous | 17334442ee3d54d6704469bba8a53eb35c1f555e | ["MIT"] | null | null | null | users/apps.py | Exaphis/Synchronous | 17334442ee3d54d6704469bba8a53eb35c1f555e | ["MIT"] | null | null | null |
from django.apps import AppConfig
from django.db.utils import OperationalError, ProgrammingError
class UsersConfig(AppConfig):
name = "users"
def ready(self):
from .models import WorkspaceUser
        # delete all workspace users on server start (nobody's connected yet!)
        # this must happen here because if Django stops unexpectedly, stale
        # users would never be removed from the database.
try:
WorkspaceUser.objects.all().delete()
except (OperationalError, ProgrammingError):
# errors if migrating from empty database, ignore
pass
| 31.578947 | 84 | 0.675 |
300556dcc8a2885d1c361372f77ef429346feeec | 6,584 | py | Python | jiant/tasks/lib/wsc.py | yzpang/jiant | 192d6b525c06f33010b59044df40cb86bbfba4ea | ["MIT"] | null | null | null | jiant/tasks/lib/wsc.py | yzpang/jiant | 192d6b525c06f33010b59044df40cb86bbfba4ea | ["MIT"] | null | null | null | jiant/tasks/lib/wsc.py | yzpang/jiant | 192d6b525c06f33010b59044df40cb86bbfba4ea | ["MIT"] | null | null | null |
import numpy as np
import torch
from dataclasses import dataclass
from typing import List
from jiant.tasks.core import (
BaseExample,
BaseTokenizedExample,
BaseDataRow,
BatchMixin,
SuperGlueMixin,
Task,
TaskTypes,
)
from jiant.tasks.lib.templates.shared import (
labels_to_bimap,
add_cls_token,
create_input_set_from_tokens_and_segments,
)
from jiant.tasks.utils import truncate_sequences, ExclusiveSpan
from jiant.utils import retokenize
from jiant.utils.python.io import read_json_lines
from jiant.utils.tokenization_normalization import normalize_tokenizations
@dataclass
class Example(BaseExample):
guid: str
text: str
span1_idx: int
span2_idx: int
span1_text: str
span2_text: str
label: str
def tokenize(self, tokenizer):
space_tokenization = self.text.split()
target_tokenization = tokenizer.tokenize(self.text)
normed_space_tokenization, normed_target_tokenization = normalize_tokenizations(
space_tokenization, target_tokenization, tokenizer
)
aligner = retokenize.TokenAligner(normed_space_tokenization, normed_target_tokenization)
span1_token_count = len(self.span1_text.split())
span2_token_count = len(self.span2_text.split())
target_span1 = ExclusiveSpan(
*aligner.project_token_span(self.span1_idx, self.span1_idx + span1_token_count)
)
target_span2 = ExclusiveSpan(
*aligner.project_token_span(self.span2_idx, self.span2_idx + span2_token_count)
)
return TokenizedExample(
guid=self.guid,
tokens=target_tokenization,
span1_span=target_span1,
span2_span=target_span2,
span1_text=self.span1_text,
span2_text=self.span2_text,
label_id=WSCTask.LABEL_TO_ID[self.label],
)
@dataclass
class TokenizedExample(BaseTokenizedExample):
guid: str
tokens: List
span1_span: ExclusiveSpan
span2_span: ExclusiveSpan
span1_text: str
span2_text: str
label_id: int
def featurize(self, tokenizer, feat_spec):
special_tokens_count = 2 # CLS, SEP
(tokens,) = truncate_sequences(
tokens_ls=[self.tokens], max_length=feat_spec.max_seq_length - special_tokens_count,
)
unpadded_tokens = tokens + [tokenizer.sep_token]
        # use the possibly-truncated token list so tokens and segment ids stay aligned
        unpadded_segment_ids = [feat_spec.sequence_a_segment_id] * (len(tokens) + 1)
unpadded_inputs = add_cls_token(
unpadded_tokens=unpadded_tokens,
unpadded_segment_ids=unpadded_segment_ids,
tokenizer=tokenizer,
feat_spec=feat_spec,
)
input_set = create_input_set_from_tokens_and_segments(
unpadded_tokens=unpadded_inputs.unpadded_tokens,
unpadded_segment_ids=unpadded_inputs.unpadded_segment_ids,
tokenizer=tokenizer,
feat_spec=feat_spec,
)
span1_span = ExclusiveSpan(
start=self.span1_span[0] + unpadded_inputs.cls_offset,
end=self.span1_span[1] + unpadded_inputs.cls_offset,
).to_inclusive()
span2_span = ExclusiveSpan(
start=self.span2_span[0] + unpadded_inputs.cls_offset,
end=self.span2_span[1] + unpadded_inputs.cls_offset,
).to_inclusive()
return DataRow(
guid=self.guid,
input_ids=np.array(input_set.input_ids),
input_mask=np.array(input_set.input_mask),
segment_ids=np.array(input_set.segment_ids),
spans=np.array([span1_span, span2_span]),
label_id=self.label_id,
tokens=unpadded_inputs.unpadded_tokens,
span1_text=self.span1_text,
span2_text=self.span2_text,
)
@dataclass
class DataRow(BaseDataRow):
guid: str
input_ids: np.ndarray
input_mask: np.ndarray
segment_ids: np.ndarray
spans: np.ndarray
label_id: int
tokens: List
span1_text: str
span2_text: str
def get_tokens(self):
return [self.tokens]
@dataclass
class Batch(BatchMixin):
input_ids: torch.LongTensor
input_mask: torch.LongTensor
segment_ids: torch.LongTensor
spans: torch.LongTensor
label_id: torch.LongTensor
tokens: List
span1_text: List
span2_text: List
class WSCTask(SuperGlueMixin, Task):
Example = Example
TokenizedExample = Example
DataRow = DataRow
Batch = Batch
TASK_TYPE = TaskTypes.SPAN_COMPARISON_CLASSIFICATION
LABELS = [False, True]
LABEL_TO_ID, ID_TO_LABEL = labels_to_bimap(LABELS)
@property
def num_spans(self):
return 2
def get_train_examples(self):
return self._create_examples(lines=read_json_lines(self.train_path), set_type="train")
def get_val_examples(self):
return self._create_examples(lines=read_json_lines(self.val_path), set_type="val")
def get_test_examples(self):
return self._create_examples(lines=read_json_lines(self.test_path), set_type="test")
@classmethod
def _create_examples(cls, lines, set_type):
examples = []
for line in lines:
examples.append(
Example(
# NOTE: WSCTask.super_glue_format_preds() is dependent on this guid format.
guid="%s-%s" % (set_type, line["idx"]),
text=line["text"],
span1_idx=line["span1_index"],
span2_idx=line["span2_index"],
span1_text=line["span1_text"],
span2_text=line["span2_text"],
label=line["label"] if set_type != "test" else cls.LABELS[-1],
)
)
return examples
@classmethod
def super_glue_format_preds(cls, pred_dict):
"""Reformat this task's raw predictions to have the structure expected by SuperGLUE."""
lines = []
for pred, guid in zip(list(pred_dict["preds"]), list(pred_dict["guids"])):
lines.append({"idx": int(guid.split("-")[1]), "label": str(cls.LABELS[pred])})
return lines
def extract_char_span(full_text, span_text, space_index):
space_tokens = full_text.split()
extracted_span_text = space_tokens[space_index]
assert extracted_span_text.lower() in full_text.lower()
span_length = len(span_text)
if space_index == 0:
start = 0
else:
start = len(" ".join(space_tokens[:space_index])) + 1
# exclusive span
return ExclusiveSpan(start=start, end=start + span_length)
| 31.961165 | 96 | 0.659933 |
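Aside (illustrative check, assuming jiant is importable): extract_char_span above maps a space-token index to an exclusive character span within the full text.

from jiant.tasks.lib.wsc import extract_char_span

span = extract_char_span("the quick brown fox", "quick", space_index=1)
assert (span.start, span.end) == (4, 9)
assert "the quick brown fox"[span.start:span.end] == "quick"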
f81593adfd60de54dd3e2a8d09e2346a94427e12 | 2,575 | py | Python | src/dashboard/__main__.py | karlicoss/dashboard | 708eb183130de31e344fa0f38fa8ae74d1953e31 | ["MIT"] | 13 | 2020-08-26T06:25:00.000Z | 2021-07-23T13:54:19.000Z | src/dashboard/__main__.py | karlicoss/dashboard | 708eb183130de31e344fa0f38fa8ae74d1953e31 | ["MIT"] | null | null | null | src/dashboard/__main__.py | karlicoss/dashboard | 708eb183130de31e344fa0f38fa8ae74d1953e31 | ["MIT"] | null | null | null |
import logging
from pathlib import Path
from typing import Iterable, Optional
from .tabs import tabs, Tab
from .settings import theme
# I guess it kinda makes sense to dump each tab separately
def render_tab(*, tab: Tab, filename: Path):
res = tab.plotter()
from bokeh.io import save, output_file, curdoc
output_file(filename, title=tab.name, mode='inline', root_dir=None)
curdoc().theme = theme
# TODO a bit weird that it needs two function calls to save..
save(res)
def run(to: Path, tab_name: Optional[str]=None, debug: bool=False) -> Iterable[Exception]:
for tab in tabs():
if isinstance(tab, Exception):
# todo collect errors in a separate tab? or just 'dashboard.html'?
yield tab
continue
if tab_name is not None and tab.name != tab_name:
logging.info('skipping %s', tab.name)
# todo error if no matches??
continue
logging.info('rendering %s', tab.name)
fname = to / (tab.name + '.html')
try:
if debug:
# todo optional dependency?
from ipdb import launch_ipdb_on_exception # type: ignore
ctx = launch_ipdb_on_exception
else:
from contextlib import nullcontext
ctx = nullcontext
with ctx():
                render_tab(tab=tab, filename=fname)
except Exception as e:
# TODO make it defensive? if there were any errors, backup old file, don't overwrite? dunno.
logging.exception(e)
import html
import traceback
tb = '</br>'.join(html.escape(l) for l in traceback.format_exception(Exception, e, e.__traceback__))
fname.write_text(tb)
yield e
continue
def main():
logging.basicConfig(level=logging.INFO)
from argparse import ArgumentParser as P
p = P()
p.add_argument('--to', type=Path, required=True)
p.add_argument('--tab', type=str, help='Plot specific tab (by default plots all)')
p.add_argument('--debug', action='store_true', help='debug on exception')
args = p.parse_args()
# todo pass function names to render? seems easier than tab names? or either?
errors = list(run(to=args.to, tab_name=args.tab, debug=args.debug))
if len(errors) > 0:
logging.error('Had %d errors while rendering', len(errors))
for e in errors:
logging.exception(e)
import sys
sys.exit(1)
if __name__ == '__main__':
main()
| 31.790123 | 112 | 0.610485 |
d62ceaa2a29c74eba0257e3bf0b910fbede23915 | 525 | py | Python | ex041.py | qnomon/Python-Studies | dbd592cf2a161bb9ddbec66f020c602bddc6d44b | ["MIT"] | null | null | null | ex041.py | qnomon/Python-Studies | dbd592cf2a161bb9ddbec66f020c602bddc6d44b | ["MIT"] | null | null | null | ex041.py | qnomon/Python-Studies | dbd592cf2a161bb9ddbec66f020c602bddc6d44b | ["MIT"] | null | null | null |
from datetime import date
ano = int(input('Enter your birth year: '))
i = date.today().year
idade = i - ano
if idade <= 9:
    print(f'You are {idade} years old and belong to the MIRIM category')
elif idade <= 14:
    print(f'You are {idade} years old and belong to the INFANTIL category')
elif idade <= 19:
    print(f'You are {idade} years old and belong to the JÚNIOR category')
elif idade <= 20:
    print(f'You are {idade} years old and belong to the SÊNIOR category')
else:
    print(f'You are {idade} years old and belong to the MASTER category')
| 37.5 | 67 | 0.691429 |
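Aside (illustrative; `category` is a name introduced here): the age-band logic above, repackaged as a testable function with the same thresholds.

def category(idade: int) -> str:
    if idade <= 9:
        return 'MIRIM'
    elif idade <= 14:
        return 'INFANTIL'
    elif idade <= 19:
        return 'JÚNIOR'
    elif idade <= 20:
        return 'SÊNIOR'
    return 'MASTER'

assert category(12) == 'INFANTIL'
assert category(20) == 'SÊNIOR'
assert category(35) == 'MASTER'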
2804282987bbf6b7dd84a39b48c2c3e6429abc9c | 6,140 | py | Python | doc/src/julia_auto_encoder.py | benryan58/SemanticModels.jl | 68da86b2501e55b8ddfb50b3df59b2406d374db2 | ["MIT"] | null | null | null | doc/src/julia_auto_encoder.py | benryan58/SemanticModels.jl | 68da86b2501e55b8ddfb50b3df59b2406d374db2 | ["MIT"] | null | null | null | doc/src/julia_auto_encoder.py | benryan58/SemanticModels.jl | 68da86b2501e55b8ddfb50b3df59b2406d374db2 | ["MIT"] | null | null | null |
import argparse
from keras import regularizers
from keras.callbacks import EarlyStopping
from keras.layers import Input, GRU, RepeatVector, Activation, CuDNNGRU
from keras.layers import Dense, BatchNormalization, Embedding
from keras.models import Model, load_model
from keras.optimizers import Adam
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
import numpy as np
import os
import pandas as pd
def ae_models(maxlen, latent_dim, N, use_gpu=False):
inputs = Input((maxlen,), name='Encoder_Inputs')
encoded = Embedding(N,
latent_dim,
name='Char_Embedding',
mask_zero=False)(inputs)
encoded = BatchNormalization(name='BatchNorm_Encoder')(encoded)
if use_gpu:
_, state_h = CuDNNGRU(latent_dim, return_state=True)(encoded)
else:
_, state_h = GRU(latent_dim, return_state=True)(encoded)
enc = Model(inputs=inputs, outputs=state_h, name='Encoder_Model')
enc_out = enc(inputs)
dec_inputs = Input(shape=(None,), name='Decoder_Inputs')
decoded = Embedding(N,
latent_dim,
name='Decoder_Embedding',
mask_zero=False)(dec_inputs)
decoded = BatchNormalization(name='BatchNorm_Decoder_1')(decoded)
if use_gpu:
dec_out, _ = CuDNNGRU(latent_dim,
return_state=True,
return_sequences=True)(decoded, initial_state=enc_out)
else:
dec_out, _ = GRU(latent_dim,
return_state=True,
return_sequences=True)(decoded, initial_state=enc_out)
dec_out = BatchNormalization(name='BatchNorm_Decoder_2')(dec_out)
dec_out = Dense(N, activation='softmax', name='Final_Out')(dec_out)
sequence_autoencoder = Model(inputs=[inputs, dec_inputs], outputs=dec_out)
return sequence_autoencoder, enc
def build_and_train(args):
seqs = get_seqs(args)
N = len(np.unique(seqs))
decoder_inputs = seqs[:, :-1]
Y = seqs[:, 1: ]
    autoencoder, enc = ae_models(args.maxlen,
                                 args.dimension,
                                 N,
                                 use_gpu=args.use_gpu)
autoencoder.compile(loss='sparse_categorical_crossentropy',
optimizer=Adam(lr=0.001, amsgrad=True),
metrics=['accuracy'])
early_stop = EarlyStopping(monitor='val_acc',
min_delta=0.0001,
patience=10,
verbose=1,
mode='auto',
restore_best_weights=True)
autoencoder.fit([seqs, decoder_inputs],
np.expand_dims(Y, -1),
epochs = 100,
batch_size = 32,
validation_split=0.12,
callbacks=[early_stop],
shuffle=True)
return autoencoder, enc
def chars_to_indices(data, tok=None, max_len=None):
if max_len is None:
max_len = max(data.apply(lambda x: len(x)))
if tok is None:
tok = Tokenizer(num_words=None,
filters="",
lower=False,
split='',
char_level=True)
data = data.values
tok.fit_on_texts(data)
sequences = tok.texts_to_sequences(data)
sequences = pad_sequences(sequences,
maxlen=max_len,
padding='post')
sequences = np.array(sequences, dtype='int16')
return sequences, tok
def get_seqs(args):
with open(args.data, "r") as f:
funcs = f.read()
funcs = process_code(funcs, args.maxlen)
seqs, _ = chars_to_indices(funcs.code, max_len=args.maxlen)
return seqs
def process_code(funcs, max_len=500):
funcs = funcs.split(".jl\n")
funcs = funcs[:-1] # remove trailing empty item
funcs = pd.DataFrame([x.rsplit("\t",1) for x in funcs])
funcs.columns = ['code','source']
# limit length of code snippets, which are rarely huge
funcs = funcs[funcs.code.str.len()<=max_len]
funcs.reset_index(drop=True, inplace=True)
funcs.source = funcs.source.apply(lambda x: x[x.index("julia/")+6:])
funcs["top_folder"] = funcs.source.apply(lambda x: x[:x.index("/")])
funcs['top2'] = funcs.source.apply(lambda x: '_'.join(x.split("/")[:2]))
return funcs
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Auto-encode Julia source code.')
parser.add_argument('-D', '--dir', default=os.getcwd(),
help='Directory for trained models.')
parser.add_argument('-d', '--data', required=True,
help='The extracted source code snippets for encoding.')
parser.add_argument('--maxlen', default=500, type=int,
help='Maximum code snippet length.')
parser.add_argument('--dimension', default=64, type=int,
help='Encoding dimension for representation.')
parser.add_argument('--use_gpu', action="store_true",
help='Should we use the GPU if available?')
    parser.add_argument('-m', '--mode', default='train', choices=['train', 'encode'],
                        help='Mode for auto-encoder [train, encode].')
args = parser.parse_args()
if args.mode == 'train':
autoenc, enc = build_and_train(args)
        autoenc.save(os.path.join(args.dir,"autoencoder.h5"))
enc.save(os.path.join(args.dir,"encoder.h5"))
elif args.mode == 'encode':
if not os.path.isabs(args.data):
args.data = os.path.join(os.getcwd(), args.data)
data_dir = os.path.dirname(args.data)
enc = load_model(os.path.join(args.dir, "encoder.h5"))
seqs = get_seqs(args)
encoded_reps = enc.predict(seqs)
encoded_reps = pd.DataFrame(encoded_reps)
encoded_reps.to_csv(os.path.join(data_dir, "encoded_reps.csv"), index=False)
| 36.766467 | 84 | 0.587459 |
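Aside (illustrative; the token values are made up): build_and_train above sets up teacher forcing by shifting the token sequences, so the decoder input drops the last token and the target drops the first, and each step predicts the next token.

import numpy as np

seqs = np.array([[5, 3, 8, 2]])   # one hypothetical tokenized snippet
decoder_inputs = seqs[:, :-1]     # [[5, 3, 8]]  fed to the decoder
Y = seqs[:, 1:]                   # [[3, 8, 2]]  predicted one step ahead
assert (decoder_inputs == [[5, 3, 8]]).all() and (Y == [[3, 8, 2]]).all()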
605fc2345602383b85494ca6f6f97fac5819b085 | 949 | py | Python | fib.py | TotallyNotTito/redesigned-octo-spoon | ff6e65536df8dc1a16cfc504571630423084ba05 | ["MIT"] | 1 | 2021-08-24T20:09:07.000Z | 2021-08-24T20:09:07.000Z | fib.py | TotallyNotTito/redesigned-octo-spoon | ff6e65536df8dc1a16cfc504571630423084ba05 | ["MIT"] | null | null | null | fib.py | TotallyNotTito/redesigned-octo-spoon | ff6e65536df8dc1a16cfc504571630423084ba05 | ["MIT"] | null | null | null |
# code for input
print("This calculator gives the Nth Fibonacci term in a sequence. Inputting the position outputs the number at that point in the sequence")
print('Input the position (N) of the Fibonacci term you want')
n = int(input())
# the first two terms of the Fibonacci sequence are both 1
if (n == 1 or n == 2) :
    print(1, 'is fibonacci output')
# main code for fibonacci output
else :
    prev = 1      # previous term (seeded with the 1st term)
    prevPrev = 1  # term before the previous one (overwritten on the first pass)
    counter = 0   # loop counter
    fib = 1       # current term (seeded with the 2nd term)
    while (counter < n - 2) :  # runs n - 2 times, since the first two terms are handled above
        prevPrev = prev        # shift the window: the previous term moves back one slot
        prev = fib             # the current term becomes the previous term
        fib = prev + prevPrev  # the next term is the sum of the previous two
        counter = counter + 1  # advance toward loop termination
print(fib, '\nis fibonacci output')
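# A minimal sketch of the same recurrence as a reusable function
# (hypothetical helper, not part of the original script):
def fib_n(n):
    """Return the Nth Fibonacci term, with fib_n(1) == fib_n(2) == 1."""
    a, b = 1, 1
    for _ in range(n - 2):
        a, b = b, a + b
    return b
# sanity check: fib_n(7) == 13  (sequence: 1, 1, 2, 3, 5, 8, 13)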
| 43.136364
| 128
| 0.684932
|
039ea1776edf147865d574ec66ce4007986a1548
| 1,849
|
py
|
Python
|
frappe/templates/pages/integrations/razorpay_checkout.py
|
chentaoz/frappe
|
ee3c4943bf6177ad3b410cdb0d802af486751a65
|
[
"MIT"
] | 5
|
2017-09-12T15:56:31.000Z
|
2022-03-09T13:50:21.000Z
|
frappe/templates/pages/integrations/razorpay_checkout.py
|
chentaoz/frappe
|
ee3c4943bf6177ad3b410cdb0d802af486751a65
|
[
"MIT"
] | 66
|
2020-02-28T07:47:09.000Z
|
2021-07-21T04:53:35.000Z
|
frappe/templates/pages/integrations/razorpay_checkout.py
|
chentaoz/frappe
|
ee3c4943bf6177ad3b410cdb0d802af486751a65
|
[
"MIT"
] | 8
|
2019-04-21T07:49:50.000Z
|
2021-12-24T20:20:38.000Z
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt, cint
import json
from six import string_types
no_cache = 1
expected_keys = ('amount', 'title', 'description', 'reference_doctype', 'reference_docname',
'payer_name', 'payer_email', 'order_id')
def get_context(context):
context.no_cache = 1
context.api_key = get_api_key()
try:
doc = frappe.get_doc("Integration Request", frappe.form_dict['token'])
payment_details = json.loads(doc.data)
for key in expected_keys:
context[key] = payment_details[key]
context['token'] = frappe.form_dict['token']
context['amount'] = flt(context['amount'])
context['subscription_id'] = payment_details['subscription_id'] \
if payment_details.get('subscription_id') else ''
except Exception as e:
frappe.redirect_to_message(_('Invalid Token'),
            _('The token you are using seems to be invalid!'),
http_status_code=400, indicator_color='red')
frappe.local.flags.redirect_location = frappe.local.response.location
raise frappe.Redirect
def get_api_key():
api_key = frappe.db.get_value("Razorpay Settings", None, "api_key")
if cint(frappe.form_dict.get("use_sandbox")):
api_key = frappe.conf.sandbox_api_key
return api_key
@frappe.whitelist(allow_guest=True)
def make_payment(razorpay_payment_id, options, reference_doctype, reference_docname, token):
data = {}
if isinstance(options, string_types):
data = json.loads(options)
data.update({
"razorpay_payment_id": razorpay_payment_id,
"reference_docname": reference_docname,
"reference_doctype": reference_doctype,
"token": token
})
data = frappe.get_doc("Razorpay Settings").create_request(data)
frappe.db.commit()
return data
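# Hedged sketch of a call to the whitelisted endpoint above; every value is
# illustrative and the JSON payload depends on the integration request:
# make_payment(
#     razorpay_payment_id="pay_000000000000",
#     options='{"amount": 100}',
#     reference_doctype="Payment Request",
#     reference_docname="PR-00001",
#     token="<integration-request-token>",
# )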
| 29.349206
| 92
| 0.756084
|
622ed5d851f0b721e3ecfa994adaed50a7f47e98
| 3,783
|
py
|
Python
|
AER/FeatureExtraction/MelFilterBank.py
|
LeBenchmark/Interspeech2021
|
2a3b424389631b317b39973291b7252bbf44a73b
|
[
"MIT"
] | 48
|
2021-03-25T14:00:04.000Z
|
2022-03-27T17:00:00.000Z
|
AER/FeatureExtraction/MelFilterBank.py
|
LeBenchmark/Interspeech2021
|
2a3b424389631b317b39973291b7252bbf44a73b
|
[
"MIT"
] | 2
|
2021-04-16T13:21:44.000Z
|
2021-06-16T15:23:09.000Z
|
AER/FeatureExtraction/MelFilterBank.py
|
LeBenchmark/Interspeech2021
|
2a3b424389631b317b39973291b7252bbf44a73b
|
[
"MIT"
] | 2
|
2021-07-05T13:42:23.000Z
|
2021-09-01T10:24:00.000Z
|
import os, sys
import argparse
from funcs import get_files_in_path
from funcs import printProgressBar
from pydub import AudioSegment
from python_speech_features import logfbank
import numpy as np
import pandas as pd
from featFuncs import loadFromJson
from DatasetHandling.DataClasses import Features, classToDic
import json
def main(featsFolder, jsonPath):
"""
    Adding mel-frequency filterbank features to a given dataset and writing their references to a JSON file
Example
----------
python MelFilterBank.py -f "MFB" -j "/mnt/HD-Storage/Datasets/Recola_46/data.json"
python MelFilterBank.py -f "MFB" -j "/mnt/HD-Storage/Datasets/AlloSat/data.json"
python MelFilterBank.py -f "MFB" -j "/mnt/HD-Storage/Datasets/Recola_46_S/data.json"
"""
samples = loadFromJson(jsonPath)
for i, ID in enumerate(samples.keys()):
sample = samples[ID]
wavePath = sample["path"]
wavsFolder = wavePath.split(os.sep)[0]
waveFullPath = os.path.join(os.path.split(jsonPath)[0], wavePath)
featsLocalPath = wavePath.replace(wavsFolder, featsFolder).replace(".wav", ".csv")
featsLocalPath = os.path.join("Feats", featsLocalPath)
featsFullPath = os.path.join(os.path.split(jsonPath)[0], featsLocalPath)
# print(featsLocalPath, featsFullPath)
dim = makeFeatsCsv(waveFullPath, featsFullPath)
if dim == 0: continue
featsDict = getFeatsDict(dim, featsFolder, featsLocalPath)
samples[ID]["features"][featsDict["ID"]] = featsDict
# saveToJson(jsonPath, sample)
printProgressBar(i + 1, len(samples), prefix = 'Adding mel-frequency filterbank features:', suffix = 'Complete', length = "fit")
with open(jsonPath, 'w') as jsonFile:
json.dump(samples, jsonFile, indent=4, ensure_ascii=False)
def getFeatsDict(dim, featsFolder, path):
feats = Features()
feats.setParams(featsFolder, "Acoustic", dim, path)
return classToDic(feats)
def makeFeatsCsv(wavePath, outPath):
    winlen = 0.025
    winstep = 0.01
fbank_feat = getFeatsFromWav(wavePath, winlen=winlen, winstep=winstep)
dim = list(fbank_feat.shape)
header = ['feat_'+str(i) for i in range(len(fbank_feat[0]))]
df = pd.DataFrame(data=fbank_feat,columns=header)
length = len(fbank_feat)
timesStart = []
timesEnd = []
time = 0
for _ in range(length):
timesStart.append(time)
timesEnd.append(time + winlen)
time += winstep
df.insert(0, 'frameTimeEnd', timesEnd)
df.insert(0, 'frameTimeStart', timesStart)
dirname = os.path.dirname(outPath)
if not os.path.exists(dirname): os.makedirs(dirname)
with open(outPath, 'w', encoding='utf-8') as f:
df.to_csv(f, index=False)
return dim
def getFeatsFromWav(path, winlen=0.025, winstep=0.01):
audio_file = AudioSegment.from_wav(path)
sig = np.array(audio_file.get_array_of_samples())
    sig = sig / 32767.0 # normalize 16-bit PCM samples to [-1.0, 1.0]
rate = audio_file.frame_rate
fbank_feat = logfbank(sig, rate, winlen=winlen, winstep=winstep, nfilt=40, nfft=2028, lowfreq=0, highfreq=None, preemph=0.97) #, winfunc=np.hanning
return fbank_feat
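# Hedged sketch: for a mono 16 kHz wav the returned matrix has one row per
# 10 ms hop (winstep) and one column per mel filter (nfilt=40 above):
# >>> feats = getFeatsFromWav("example.wav")  # hypothetical file
# >>> feats.shape[1]
# 40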
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--featsFolder', '-f', help="features folder name")
parser.add_argument('--json', '-j', help="json file path")
args = parser.parse_args()
    if args.featsFolder is None or args.json is None:
print(main.__doc__)
parser.print_help()
else:
main(args.featsFolder, args.json)
# wavPath = "../../Data/WavsProcessed"
# csvPath = "../../Data/Feats_MFB"
# calc_mfbs(wavPath, csvPath, rate=16000)
| 38.602041
| 151
| 0.679355
|
08cbd85cf1d64efbfed2800c1ed0236ffc01e708
| 1,563
|
py
|
Python
|
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/urls.py
|
taaviteska/django-template
|
63454ff55faa38dbe1fcf4e93bc0af475add2b7c
|
[
"MIT"
] | null | null | null |
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/urls.py
|
taaviteska/django-template
|
63454ff55faa38dbe1fcf4e93bc0af475add2b7c
|
[
"MIT"
] | null | null | null |
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/urls.py
|
taaviteska/django-template
|
63454ff55faa38dbe1fcf4e93bc0af475add2b7c
|
[
"MIT"
] | null | null | null |
"""{{cookiecutter.project_title}} URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.i18n import i18n_patterns
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic.base import TemplateView
from django.views.i18n import JavaScriptCatalog
urlpatterns = i18n_patterns(
url(r'^$', TemplateView.as_view(template_name='home.html'), name='home'),
url(r'^accounts/', include('accounts.urls')),
url(r'^jsi18n/$', JavaScriptCatalog.as_view(), name='javascript-catalog'),
url(r'^manage/', admin.site.urls),
prefix_default_language=False,
)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
| 38.121951
| 79
| 0.734485
|
eada264ac6309ca67f8a189a689f00bcf5f50a79
| 4,388
|
py
|
Python
|
src/infi/execute/result.py
|
Infinidat/infi.execute
|
4bee7eb9993ca1e7769244eb569df49e328a4c48
|
[
"BSD-3-Clause"
] | 1
|
2020-08-18T16:26:01.000Z
|
2020-08-18T16:26:01.000Z
|
src/infi/execute/result.py
|
Infinidat/infi.execute
|
4bee7eb9993ca1e7769244eb569df49e328a4c48
|
[
"BSD-3-Clause"
] | null | null | null |
src/infi/execute/result.py
|
Infinidat/infi.execute
|
4bee7eb9993ca1e7769244eb569df49e328a4c48
|
[
"BSD-3-Clause"
] | 2
|
2015-09-27T13:18:08.000Z
|
2020-08-18T16:26:05.000Z
|
from .waiting import wait_for_many_results, flush
from .exceptions import CommandTimeout
from .exceptions import ExecutionError
from .utils import make_fd_non_blocking, non_blocking_read, non_blocking_write
try:
from cStringIO import StringIO as BytesIO
except ImportError:
from io import BytesIO
import os
import signal
from .ioloop import time, sleep
MAX_INPUT_CHUNK_SIZE = 1024
class Result(object):
def __init__(self, command, popen, stdin, assert_success, timeout):
super(Result, self).__init__()
self._command = command
self._popen = popen
self._output = BytesIO()
self._input = BytesIO(stdin or b'')
self._error = BytesIO()
self._assert_success = assert_success
self._deadline = None
if timeout is not None:
self._deadline = time() + timeout
make_fd_non_blocking(self._popen.stdout.fileno())
make_fd_non_blocking(self._popen.stderr.fileno())
def get_deadline(self):
return self._deadline
def get_returncode(self):
return self._popen.returncode
def kill(self, sig=signal.SIGTERM):
if not self.is_finished():
os.kill(self.get_pid(), sig)
sleep(0)
def register_to_ioloop(self, ioloop):
if self._popen.stdout is not None:
self._register_stdout(ioloop)
if self._popen.stderr is not None:
self._register_stderr(ioloop)
if self._popen.stdin is not None:
self._register_stdin(ioloop)
def _register_stdin(self, ioloop):
ioloop.register_write(self._popen.stdin, self._handle_stdin)
def _register_stdout(self, ioloop):
ioloop.register_read(self._popen.stdout, self._handle_stdout)
def _register_stderr(self, ioloop):
ioloop.register_read(self._popen.stderr, self._handle_stderr)
def unregister_from_ioloop(self, ioloop):
if self._popen.stdout is not None:
ioloop.unregister_read(self._popen.stdout)
if self._popen.stderr is not None:
ioloop.unregister_read(self._popen.stderr)
if self._popen.stdin is not None:
ioloop.unregister_write(self._popen.stdin)
def _handle_stdout(self, ioloop, f, count=-1):
""" because anonymous pipes in windows can be blocked, we need to pay attention
on how much we read
"""
output = non_blocking_read(self._popen.stdout, count)
if not output:
self._popen.stdout.close()
self._popen.stdout = None
else:
self._output.write(output)
self._register_stdout(ioloop)
def _handle_stdin(self, ioloop, f):
        chunk = self._input.read(MAX_INPUT_CHUNK_SIZE)
        non_blocking_write(self._popen.stdin, chunk)
        if len(chunk) < MAX_INPUT_CHUNK_SIZE:
self._popen.stdin.close()
self._popen.stdin = None
else:
self._register_stdin(ioloop)
def _handle_stderr(self, ioloop, f, count=-1):
""" because anonymous pipes in windows can be blocked, we need to pay attention
on how much we read
"""
output = non_blocking_read(self._popen.stderr, count)
if not output:
self._popen.stderr.close()
self._popen.stderr = None
else:
self._error.write(output)
self._register_stderr(ioloop)
def poll(self):
self._popen.poll()
self._check_return_code()
return self.get_returncode()
def _check_return_code(self):
returncode = self.get_returncode()
if returncode is not None:
flush(self)
if self._assert_success and returncode is not None and returncode != 0:
raise ExecutionError(self)
def wait(self, timeout=None):
returned_results = wait_for_many_results([self], timeout=timeout)
returned = self in returned_results
if not returned and (self.get_deadline() or timeout):
raise CommandTimeout(self)
return returned
def is_finished(self):
return self.poll() is not None
def __repr__(self):
return "<pid %s: %s>" % (self.get_pid(), self._command)
def get_pid(self):
return self._popen.pid
def get_stdout(self):
return self._output.getvalue()
def get_stderr(self):
return self._error.getvalue()
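# Hedged usage sketch: Result objects are normally produced by this package's
# execute helpers rather than constructed directly. Given such a `result`,
# the typical flow only uses the methods defined above:
# >>> result.wait(timeout=10)   # may raise CommandTimeout
# True
# >>> result.get_returncode()
# 0
# >>> result.get_stdout()
# b'hello\n'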
| 33.496183
| 87
| 0.649043
|
5ead4e5c3131b3ccb8de7e16f5f55964414cf575
| 153,945
|
py
|
Python
|
tests/test_bert_ner.py
|
bond005/bert_ner
|
cd5bfaf97511955934eb2faf3eb97e9e454e989a
|
[
"Apache-2.0"
] | 2
|
2019-02-18T19:12:39.000Z
|
2019-04-22T05:11:03.000Z
|
tests/test_bert_ner.py
|
bond005/bert_ner
|
cd5bfaf97511955934eb2faf3eb97e9e454e989a
|
[
"Apache-2.0"
] | null | null | null |
tests/test_bert_ner.py
|
bond005/bert_ner
|
cd5bfaf97511955934eb2faf3eb97e9e454e989a
|
[
"Apache-2.0"
] | null | null | null |
import copy
import gc
import os
import pickle
import re
import sys
import tempfile
import unittest
import numpy as np
from sklearn.exceptions import NotFittedError
try:
from bert_ner.bert_ner import BERT_NER
from bert_ner.utils import load_dataset
except ImportError:
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from bert_ner.bert_ner import BERT_NER
from bert_ner.utils import load_dataset
class TestBertNer(unittest.TestCase):
def tearDown(self):
if hasattr(self, 'ner'):
del self.ner
if hasattr(self, 'another_ner'):
del self.another_ner
if hasattr(self, 'temp_file_name'):
if os.path.isfile(self.temp_file_name):
os.remove(self.temp_file_name)
def test_creation(self):
self.ner = BERT_NER()
self.assertIsInstance(self.ner, BERT_NER)
self.assertTrue(hasattr(self.ner, 'batch_size'))
self.assertTrue(hasattr(self.ner, 'lstm_units'))
self.assertTrue(hasattr(self.ner, 'lr'))
self.assertTrue(hasattr(self.ner, 'l2_reg'))
self.assertTrue(hasattr(self.ner, 'clip_norm'))
self.assertTrue(hasattr(self.ner, 'bert_hub_module_handle'))
self.assertTrue(hasattr(self.ner, 'finetune_bert'))
self.assertTrue(hasattr(self.ner, 'max_epochs'))
self.assertTrue(hasattr(self.ner, 'patience'))
self.assertTrue(hasattr(self.ner, 'random_seed'))
self.assertTrue(hasattr(self.ner, 'gpu_memory_frac'))
self.assertTrue(hasattr(self.ner, 'max_seq_length'))
self.assertTrue(hasattr(self.ner, 'validation_fraction'))
self.assertTrue(hasattr(self.ner, 'verbose'))
self.assertIsInstance(self.ner.batch_size, int)
self.assertIsInstance(self.ner.lstm_units, int)
self.assertIsInstance(self.ner.lr, float)
self.assertIsInstance(self.ner.l2_reg, float)
self.assertIsInstance(self.ner.clip_norm, float)
self.assertIsInstance(self.ner.bert_hub_module_handle, str)
self.assertIsInstance(self.ner.finetune_bert, bool)
self.assertIsInstance(self.ner.max_epochs, int)
self.assertIsInstance(self.ner.patience, int)
self.assertIsNone(self.ner.random_seed)
self.assertIsInstance(self.ner.gpu_memory_frac, float)
self.assertIsInstance(self.ner.max_seq_length, int)
self.assertIsInstance(self.ner.validation_fraction, float)
self.assertIsInstance(self.ner.verbose, bool)
def test_check_params_positive(self):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1', finetune_bert=True,
batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0, validation_fraction=0.1,
max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=None
)
self.assertTrue(True)
def test_check_params_negative001(self):
true_err_msg = re.escape('`bert_hub_module_handle` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative002(self):
true_err_msg = re.escape('`bert_hub_module_handle` is wrong! Expected `{0}`, got `{1}`.'.format(
type('abc'), type(123)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle=1, finetune_bert=True,
batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0, validation_fraction=0.1,
max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128
)
def test_check_params_negative003(self):
true_err_msg = re.escape('`batch_size` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0, validation_fraction=0.1,
max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128
)
def test_check_params_negative004(self):
true_err_msg = re.escape('`batch_size` is wrong! Expected `{0}`, got `{1}`.'.format(
type(3), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size='32', max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative005(self):
true_err_msg = re.escape('`batch_size` is wrong! Expected a positive integer value, but -3 is not positive.')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=-3, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative006(self):
true_err_msg = re.escape('`max_epochs` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128
)
def test_check_params_negative007(self):
true_err_msg = re.escape('`max_epochs` is wrong! Expected `{0}`, got `{1}`.'.format(
type(3), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs='10', patience=3, gpu_memory_frac=1.0, verbose=False,
random_seed=42, lstm_units=128
)
def test_check_params_negative008(self):
true_err_msg = re.escape('`max_epochs` is wrong! Expected a positive integer value, but -3 is not positive.')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=-3, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative009(self):
true_err_msg = re.escape('`patience` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative010(self):
true_err_msg = re.escape('`patience` is wrong! Expected `{0}`, got `{1}`.'.format(
type(3), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience='3', gpu_memory_frac=1.0, verbose=False,
random_seed=42, lstm_units=128
)
def test_check_params_negative011(self):
true_err_msg = re.escape('`patience` is wrong! Expected a positive integer value, but -3 is not positive.')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=-3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative012(self):
true_err_msg = re.escape('`max_seq_length` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, lr=1e-3, l2_reg=1e-4, clip_norm=5.0, validation_fraction=0.1,
max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128
)
def test_check_params_negative013(self):
true_err_msg = re.escape('`max_seq_length` is wrong! Expected `{0}`, got `{1}`.'.format(
type(3), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length='512', lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative014(self):
true_err_msg = re.escape('`max_seq_length` is wrong! Expected a positive integer value, but -3 is not '
'positive.')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=-3, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative015(self):
true_err_msg = re.escape('`validation_fraction` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128
)
def test_check_params_negative016(self):
true_err_msg = re.escape('`validation_fraction` is wrong! Expected `{0}`, got `{1}`.'.format(
type(3.5), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction='0.1', max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False,
random_seed=42, lstm_units=128
)
def test_check_params_negative017(self):
true_err_msg = '`validation_fraction` is wrong! Expected a positive floating-point value less than 1.0, but ' \
'{0} is not positive.'.format(-0.1)
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=-0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative018(self):
true_err_msg = '`validation_fraction` is wrong! Expected a positive floating-point value less than 1.0, but ' \
'{0} is not less than 1.0.'.format(1.1)
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=1.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative019(self):
true_err_msg = re.escape('`gpu_memory_frac` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, verbose=False, random_seed=42, lstm_units=128
)
def test_check_params_negative020(self):
true_err_msg = re.escape('`gpu_memory_frac` is wrong! Expected `{0}`, got `{1}`.'.format(
type(3.5), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac='1.0', verbose=False,
random_seed=42, lstm_units=128
)
def test_check_params_negative021(self):
true_err_msg = re.escape('`gpu_memory_frac` is wrong! Expected a floating-point value in the (0.0, 1.0], '
'but {0} is not proper.'.format(-1.0))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=-1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative022(self):
true_err_msg = re.escape('`gpu_memory_frac` is wrong! Expected a floating-point value in the (0.0, 1.0], '
'but {0} is not proper.'.format(1.3))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.3, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative023(self):
true_err_msg = re.escape('`lr` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative024(self):
true_err_msg = re.escape('`lr` is wrong! Expected `{0}`, got `{1}`.'.format(
type(3.5), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr='1e-3', l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative025(self):
true_err_msg = re.escape('`lr` is wrong! Expected a positive floating-point value, but {0} is not '
'positive.'.format(0.0))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=0.0, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative026(self):
true_err_msg = re.escape('`lr` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative027(self):
true_err_msg = re.escape('`lr` is wrong! Expected `{0}`, got `{1}`.'.format(
type(3.5), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr='1e-3', l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative028(self):
true_err_msg = re.escape('`lr` is wrong! Expected a positive floating-point value, but {0} is not '
'positive.'.format(0.0))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=0.0, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative029(self):
true_err_msg = re.escape('`l2_reg` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, clip_norm=5.0, validation_fraction=0.1,
max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128
)
def test_check_params_negative030(self):
true_err_msg = re.escape('`l2_reg` is wrong! Expected `{0}`, got `{1}`.'.format(
type(3.5), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg='1e-4', clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative031(self):
true_err_msg = re.escape('`l2_reg` is wrong! Expected a non-negative floating-point value, but {0} is '
'negative.'.format(-2.0))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=-2.0, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative032(self):
true_err_msg = re.escape('`finetune_bert` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, validation_fraction=0.1, clip_norm=5.0,
max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128
)
def test_check_params_negative033(self):
true_err_msg = re.escape('`finetune_bert` is wrong! Expected `{0}`, got `{1}`.'.format(
type(True), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert='True', batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128
)
def test_check_params_negative034(self):
true_err_msg = re.escape('`verbose` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, random_seed=42, lstm_units=128
)
def test_check_params_negative035(self):
true_err_msg = re.escape('`verbose` is wrong! Expected `{0}`, got `{1}`.'.format(
type(True), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose='False',
random_seed=42, lstm_units=128
)
def test_check_params_negative036(self):
true_err_msg = re.escape('`lstm_units` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42
)
def test_check_params_negative037(self):
true_err_msg = re.escape('`lstm_units` is wrong! Expected `{0}`, got `{1}`.'.format(
type(3), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
lstm_units='128', finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4,
clip_norm=5.0, validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False,
random_seed=42
)
def test_check_params_negative038(self):
true_err_msg = re.escape('`lstm_units` is wrong! Expected a positive integer value, but -3 is not positive.')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=-3
)
def test_check_X_positive(self):
X = ['abc', 'defgh', '4wdffg']
BERT_NER.check_X(X, 'X_train')
self.assertTrue(True)
def test_check_X_negative01(self):
X = {'abc', 'defgh', '4wdffg'}
true_err_msg = re.escape('`X_train` is wrong, because it is not list-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_X(X, 'X_train')
def test_check_X_negative02(self):
X = np.random.uniform(-1.0, 1.0, (10, 2))
true_err_msg = re.escape('`X_train` is wrong, because it is not 1-D list!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_X(X, 'X_train')
def test_check_X_negative03(self):
X = ['abc', 23, '4wdffg']
true_err_msg = re.escape('Item 1 of `X_train` is wrong, because it is not string-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_X(X, 'X_train')
    def test_check_Xy_positive(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_classes_list = ('LOC', 'ORG', 'PER')
self.assertEqual(true_classes_list, BERT_NER.check_Xy(X, 'X_train', y, 'y_train'))
    def test_check_Xy_negative01(self):
X = {
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
}
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('`X_train` is wrong, because it is not list-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative02(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = {
'1': {
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
'2': {
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
}
true_err_msg = re.escape('`y_train` is wrong, because it is not a list-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative03(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = np.random.uniform(-1.0, 1.0, (10, 2))
true_err_msg = re.escape('`y_train` is wrong, because it is not 1-D list!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative04(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
},
{
'LOC': [(17, 24), (117, 130)]
}
]
true_err_msg = re.escape('Length of `X_train` does not correspond to length of `y_train`! 2 != 3')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative05(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
4
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because it is not a dictionary-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative06(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
1: [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because its key `1` is not a string-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative07(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'O': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because its key `O` incorrectly specifies a named '
'entity!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative08(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'123': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because its key `123` incorrectly specifies a named '
'entity!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative09(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'loc': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because its key `loc` incorrectly specifies a named '
'entity!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative10(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': {1, 2}
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because its value `{0}` is not a list-like '
'object!'.format(y[0]['PER']))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative11(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), 63],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because named entity bounds `63` are not specified as '
'list-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative12(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77, 81)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because named entity bounds `{0}` are not specified as '
'2-D list!'.format((63, 77, 81)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative13(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (219, 196)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because named entity bounds `{0}` are '
'incorrect!'.format((219, 196)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative14(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 519)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because named entity bounds `{0}` are '
'incorrect!'.format((196, 519)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_check_Xy_negative15(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(-1, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because named entity bounds `{0}` are '
'incorrect!'.format((-1, 137)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def test_calculate_bounds_of_tokens_positive01(self):
source_text = 'Совершенно новую технологию перекачки российской водки за рубеж начали использовать ' \
'контрабандисты.'
tokenized_text = ['Со', '##вер', '##шен', '##но', 'новую', 'тех', '##но', '##логи', '##ю', 'пер', '##ека',
'##чки', 'российской', 'вод', '##ки', 'за', 'р', '##уб', '##еж', 'начали', 'использовать',
'кон', '##тра', '##бан', '##ди', '##сты', '.']
true_bounds = [(0, 2), (2, 5), (5, 8), (8, 10), (11, 16), (17, 20), (20, 22), (22, 26), (26, 27), (28, 31),
(31, 34), (34, 37), (38, 48), (49, 52), (52, 54), (55, 57), (58, 59), (59, 61), (61, 63),
(64, 70), (71, 83), (84, 87), (87, 90), (90, 93), (93, 95), (95, 98), (98, 99)]
self.assertEqual(true_bounds, BERT_NER.calculate_bounds_of_tokens(source_text, tokenized_text))
def test_calculate_bounds_of_tokens_positive02(self):
source_text = 'Кстати за два дня до итальянцев, мальтийские пограничники уже задерживали лодку. ' \
'Однако они только дали им топливо, помогли завести двигатель и указали дорогу.'
tokenized_text = ['К', '##стат', '##и', 'за', 'два', 'дня', 'до', 'итал', '##ья', '##нцев', ',', 'мал', '##ьт',
'##ий', '##ские', 'по', '##гра', '##ни', '##чники', 'уже', 'за', '##дер', '##живал', '##и',
'ло', '##дку', '.', 'Однако', 'они', 'только', 'дали', 'им', 'топ', '##ливо', ',', 'пом',
'##ог', '##ли', 'за', '##вести', 'д', '##вигатель', 'и', 'ук', '##аза', '##ли', 'дорог',
'##у', '.']
true_bounds = [(0, 1), (1, 5), (5, 6), (7, 9), (10, 13), (14, 17), (18, 20), (21, 25), (25, 27), (27, 31),
(31, 32), (33, 36), (36, 38), (38, 40), (40, 44), (45, 47), (47, 50), (50, 52), (52, 57),
(58, 61), (62, 64), (64, 67), (67, 72), (72, 73), (74, 76), (76, 79), (79, 80), (81, 87),
(88, 91), (92, 98), (99, 103), (104, 106), (107, 110), (110, 114), (114, 115), (116, 119),
(119, 121), (121, 123), (124, 126), (126, 131), (132, 133), (133, 141), (142, 143), (144, 146),
(146, 149), (149, 151), (152, 157), (157, 158), (158, 159)]
self.assertEqual(true_bounds, BERT_NER.calculate_bounds_of_tokens(source_text, tokenized_text))
def test_calculate_bounds_of_tokens_positive03(self):
source_text = 'Один из последних представителей клады, тираннозавр (Tyrannosaurus rex), живший 66–67 ' \
'миллионов лет назад, был одним из крупнейших когда-либо живших сухопутных хищников'
tokenized_text = ['Один', 'из', 'последних', 'представителей', 'к', '##лады', ',', 'ти', '##ран', '##но',
'##за', '##вр', '(', 'Ty', '##ranno', '##saurus', 'rex', ')', ',', 'жив', '##ший', '66',
'[UNK]', '67', 'миллионов', 'лет', 'назад', ',', 'был', 'одним', 'из', 'крупнейших', 'когда',
'-', 'либо', 'жив', '##ших', 'су', '##хо', '##пу', '##тных', 'х', '##и', '##щ', '##ников']
true_bounds = [(0, 4), (5, 7), (8, 17), (18, 32), (33, 34), (34, 38), (38, 39), (40, 42), (42, 45), (45, 47),
(47, 49), (49, 51), (52, 53), (53, 55), (55, 60), (60, 66), (67, 70), (70, 71), (71, 72),
(73, 76), (76, 79), (80, 82), (82, 83), (83, 85), (86, 95), (96, 99), (100, 105), (105, 106),
(107, 110), (111, 116), (117, 119), (120, 130), (131, 136), (136, 137), (137, 141), (142, 145),
(145, 148), (149, 151), (151, 153), (153, 155), (155, 159), (160, 161), (161, 162), (162, 163),
(163, 168)]
self.assertEqual(true_bounds, BERT_NER.calculate_bounds_of_tokens(source_text, tokenized_text))
def test_calculate_bounds_of_tokens_positive04(self):
source_text = '–༼༽❆♖мама坦'
tokenized_text = ['[UNK]', '[UNK]', '[UNK]', '[UNK]', '坦']
true_bounds = [(0, 1), (1, 2), (2, 3), (3, 4), (9, 10)]
self.assertEqual(true_bounds, BERT_NER.calculate_bounds_of_tokens(source_text, tokenized_text))
def test_calculate_bounds_of_tokens_positive05(self):
source_text = '–༼ ༽❆♖мама坦мыла раму'
tokenized_text = ['[UNK]', '[UNK]', '[UNK]', '[UNK]', '坦', 'мы', '##ла', 'р', '##ам', '##у']
true_bounds = [(0, 1), (1, 2), (3, 4), (4, 5), (10, 11), (11, 13), (13, 15), (16, 17), (17, 19), (19, 20)]
self.assertEqual(true_bounds, BERT_NER.calculate_bounds_of_tokens(source_text, tokenized_text))
def test_calculate_bounds_of_tokens_positive06(self):
source_text = 'Ёжик идёт домой'
tokenized_text = ['Ё', '##жи', '##к', 'идёт', 'домой']
true_bounds = [(0, 1), (1, 3), (3, 4), (5, 9), (10, 15)]
self.assertEqual(true_bounds, BERT_NER.calculate_bounds_of_tokens(source_text, tokenized_text))
def test_detect_token_labels_positive01(self):
source_text = 'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози.'
tokenized_text = ['Ба', '##рак', 'Об', '##ама', 'принимает', 'в', 'Б', '##елом', 'доме', 'своего',
'французского', 'кол', '##ле', '##гу', 'Н', '##ико', '##ля', 'Са', '##рко', '##зи', '.']
token_bounds = BERT_NER.calculate_bounds_of_tokens(source_text, tokenized_text)
indices_of_named_entities = np.array(
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 0],
dtype=np.int32
)
label_IDs = {1: 1, 2: 2, 3: 1}
y_true = np.array(
[0, 2, 1, 1, 1, 0, 0, 4, 3, 3, 0, 0, 0, 0, 0, 2, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
dtype=np.int32
)
y_pred = BERT_NER.detect_token_labels(tokenized_text, token_bounds, indices_of_named_entities, label_IDs, 32)
self.assertIsInstance(y_pred, np.ndarray)
self.assertEqual(y_true.shape, y_pred.shape)
self.assertEqual(y_true.tolist(), y_pred.tolist())
def test_detect_token_labels_positive02(self):
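"""A longer text: five annotated spans mapped onto four classes via
label_IDs, checked with the same begin/continuation encoding at
max_seq_len = 64."""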
source_text = 'С 1876 г Павлов ассистирует профессору К. Н. Устимовичу в Медико-хирургической академии и ' \
'параллельно изучает физиологию кровообращения.'
tokenized_text = ['С', '1876', 'г', 'Павло', '##в', 'а', '##сси', '##сти', '##рует', 'профессор', '##у', 'К',
'.', 'Н', '.', 'У', '##сти', '##мов', '##ич', '##у', 'в', 'М', '##еди', '##ко', '-',
'х', '##ир', '##ург', '##ической', 'академии', 'и', 'пара', '##лл', '##ельно',
'из', '##уч', '##ает', 'ф', '##из', '##ио', '##логи', '##ю',
'к', '##рово', '##об', '##ращения', '.']
token_bounds = BERT_NER.calculate_bounds_of_tokens(source_text, tokenized_text)
indices_of_named_entities = np.array(
[0, 0, 1, 1, 1, 1, 1, 1, 0, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
dtype=np.int32
)
label_IDs = {1: 1, 2: 2, 3: 3, 4: 2, 5: 4}
y_true = np.array(
[0, 0, 2, 1, 4, 3, 0, 0, 0, 0, 6, 5, 4, 3, 3, 3, 3, 3, 3, 3, 3, 0, 8, 7, 7, 7, 7, 7, 7, 7, 7, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
dtype=np.int32
)
y_pred = BERT_NER.detect_token_labels(tokenized_text, token_bounds, indices_of_named_entities, label_IDs, 64)
self.assertIsInstance(y_pred, np.ndarray)
self.assertEqual(y_true.shape, y_pred.shape)
self.assertEqual(y_true.tolist(), y_pred.tolist())
def test_detect_token_labels_positive03(self):
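"""Two entities of the same class sitting next to each other ('Варшавский...'
directly followed by 'Томский...') must not merge: y_true opens each of them
with a fresh begin label (4)."""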
source_text = 'Весной 1890 года Варшавский и Томский университеты избирают его профессором.'
tokenized_text = ['В', '##есной', '1890', 'года', 'В', '##ар', '##ша', '##вский', 'и', 'Томск', '##ий',
'университет', '##ы', 'из', '##бира', '##ют', 'его', 'профессором', '.']
token_bounds = BERT_NER.calculate_bounds_of_tokens(source_text, tokenized_text)
indices_of_named_entities = np.array(
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0],
dtype=np.int32
)
label_IDs = {1: 1, 2: 2, 3: 2}
y_true = np.array(
[0, 2, 1, 1, 1, 4, 3, 3, 3, 3, 4, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
dtype=np.int32
)
y_pred = BERT_NER.detect_token_labels(tokenized_text, token_bounds, indices_of_named_entities, label_IDs, 32)
self.assertIsInstance(y_pred, np.ndarray)
self.assertEqual(y_true.shape, y_pred.shape)
self.assertEqual(y_true.tolist(), y_pred.tolist())
def test_detect_token_labels_positive04(self):
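"""Same sentence as positive01, but the character-level annotation is shifted
off the token boundaries; the resulting token labels must nevertheless match
the cleanly aligned case."""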
source_text = 'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози.'
tokenized_text = ['Ба', '##рак', 'Об', '##ама', 'принимает', 'в', 'Б', '##елом', 'доме', 'своего',
'французского', 'кол', '##ле', '##гу', 'Н', '##ико', '##ля', 'Са', '##рко', '##зи', '.']
token_bounds = BERT_NER.calculate_bounds_of_tokens(source_text, tokenized_text)
indices_of_named_entities = np.array(
[0, 0, 1, 1, 1, 1, 1, 1, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 0, 0, 0],
dtype=np.int32
)
label_IDs = {1: 1, 2: 2, 3: 1}
y_true = np.array(
[0, 2, 1, 1, 1, 0, 0, 4, 3, 3, 0, 0, 0, 0, 0, 2, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
dtype=np.int32
)
y_pred = BERT_NER.detect_token_labels(tokenized_text, token_bounds, indices_of_named_entities, label_IDs, 32)
self.assertIsInstance(y_pred, np.ndarray)
self.assertEqual(y_true.shape, y_pred.shape)
self.assertEqual(y_true.tolist(), y_pred.tolist())
def test_calculate_indices_of_named_entities(self):
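"""calculate_indices_of_named_entities must give every annotated span its own
positive label in a character-level index array and return a dict mapping
each label back to the 1-based position of its class in classes_list (here
label 1 -> LOCATION, labels 2 and 3 -> PERSON)."""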
source_text = 'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози.'
classes_list = ('LOCATION', 'ORG', 'PERSON')
named_entities = {'PERSON': [(0, 11), (63, 77)], 'LOCATION': [(24, 34)]}
true_indices = np.array(
[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 0],
dtype=np.int32
)
true_labels_to_classes = {1: 1, 2: 3, 3: 3}
indices, labels_to_classes = BERT_NER.calculate_indices_of_named_entities(source_text, classes_list,
named_entities)
self.assertIsInstance(indices, np.ndarray)
self.assertIsInstance(labels_to_classes, dict)
self.assertEqual(true_indices.shape, indices.shape)
self.assertEqual(true_indices.tolist(), indices.tolist())
self.assertEqual(set(true_labels_to_classes.keys()), set(labels_to_classes.keys()))
for label_ID in true_labels_to_classes:
self.assertEqual(true_labels_to_classes[label_ID], labels_to_classes[label_ID])
def test_tokenize_by_character_groups(self):
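"""The pre-tokenizer must split text into runs of uniform character class,
so '66–67' becomes ['66', '–', '67'] and 'когда-либо' becomes
['когда', '-', 'либо'], while whitespace is dropped."""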
source_text = 'Один из последних представителей клады, тираннозавр (Tyrannosaurus rex), живший 66–67 ' \
'миллионов лет назад, был одним из крупнейших когда-либо живших сухопутных хищников'
true_tokens = ['Один', 'из', 'последних', 'представителей', 'клады', ',', 'тираннозавр', '(', 'Tyrannosaurus',
'rex', ')', ',', 'живший', '66', '–', '67', 'миллионов', 'лет', 'назад', ',', 'был', 'одним',
'из', 'крупнейших', 'когда', '-', 'либо', 'живших', 'сухопутных', 'хищников']
self.assertEqual(true_tokens, BERT_NER.tokenize_by_character_groups(source_text))
def test_calc_similarity_between_entities_positive01(self):
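"""calc_similarity_between_entities treats (start, end) spans as half-open
character intervals and, judging by the expected values in these tests,
returns (similarity, tp, fp, fn), where tp / fp / fn count the overlapping,
prediction-only and gold-only characters, and
similarity = tp / (tp + fp + fn), i.e. the Jaccard index of the two
intervals. Identical spans give similarity 1.0 with no errors."""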
gold_entity = (3, 9)
predicted_entity = (3, 9)
true_similarity = 1.0
true_tp = 6
true_fp = 0
true_fn = 0
similarity, tp, fp, fn = BERT_NER.calc_similarity_between_entities(gold_entity, predicted_entity)
self.assertAlmostEqual(true_similarity, similarity, places=4)
self.assertEqual(true_tp, tp)
self.assertEqual(true_fp, fp)
self.assertEqual(true_fn, fn)
def test_calc_similarity_between_entities_positive02(self):
gold_entity = (4, 8)
predicted_entity = (3, 9)
true_similarity = 0.666666667
true_tp = 4
true_fp = 2
true_fn = 0
similarity, tp, fp, fn = BERT_NER.calc_similarity_between_entities(gold_entity, predicted_entity)
self.assertAlmostEqual(true_similarity, similarity, places=4)
self.assertEqual(true_tp, tp)
self.assertEqual(true_fp, fp)
self.assertEqual(true_fn, fn)
def test_calc_similarity_between_entities_positive03(self):
gold_entity = (3, 9)
predicted_entity = (4, 8)
true_similarity = 0.666666667
true_tp = 4
true_fp = 0
true_fn = 2
similarity, tp, fp, fn = BERT_NER.calc_similarity_between_entities(gold_entity, predicted_entity)
self.assertAlmostEqual(true_similarity, similarity, places=4)
self.assertEqual(true_tp, tp)
self.assertEqual(true_fp, fp)
self.assertEqual(true_fn, fn)
def test_calc_similarity_between_entities_positive04(self):
gold_entity = (3, 9)
predicted_entity = (2, 8)
true_similarity = 0.714285714
true_tp = 5
true_fp = 1
true_fn = 1
similarity, tp, fp, fn = BERT_NER.calc_similarity_between_entities(gold_entity, predicted_entity)
self.assertAlmostEqual(true_similarity, similarity, places=4)
self.assertEqual(true_tp, tp)
self.assertEqual(true_fp, fp)
self.assertEqual(true_fn, fn)
def test_calc_similarity_between_entities_positive05(self):
gold_entity = (2, 8)
predicted_entity = (3, 9)
true_similarity = 0.714285714
true_tp = 5
true_fp = 1
true_fn = 1
similarity, tp, fp, fn = BERT_NER.calc_similarity_between_entities(gold_entity, predicted_entity)
self.assertAlmostEqual(true_similarity, similarity, places=4)
self.assertEqual(true_tp, tp)
self.assertEqual(true_fp, fp)
self.assertEqual(true_fn, fn)
def test_calc_similarity_between_entities_positive06(self):
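"""Disjoint spans: similarity drops to 0.0, but fp and fn still count the
full lengths of the predicted and gold spans respectively."""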
gold_entity = (3, 9)
predicted_entity = (10, 16)
true_similarity = 0.0
true_tp = 0
true_fp = 6
true_fn = 6
similarity, tp, fp, fn = BERT_NER.calc_similarity_between_entities(gold_entity, predicted_entity)
self.assertAlmostEqual(true_similarity, similarity, places=4)
self.assertEqual(true_tp, tp)
self.assertEqual(true_fp, fp)
self.assertEqual(true_fn, fn)
def test_calc_similarity_between_entities_positive07(self):
gold_entity = (3, 9)
predicted_entity = (0, 2)
true_similarity = 0.0
true_tp = 0
true_fp = 2
true_fn = 6
similarity, tp, fp, fn = BERT_NER.calc_similarity_between_entities(gold_entity, predicted_entity)
self.assertAlmostEqual(true_similarity, similarity, places=4)
self.assertEqual(true_tp, tp)
self.assertEqual(true_fp, fp)
self.assertEqual(true_fn, fn)
def test_load_dataset_positive01(self):
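"""load_dataset must parse the JSON corpus with paragraph markup from
testdata/ into two parallel lists: the raw texts, and per-text dicts that
map an entity type ('PERSON', 'LOCATION', 'ORG') to a list of half-open
character-offset spans."""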
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
file_name = os.path.join(base_dir, 'dataset_with_paragraphs.json')
X_true = [
'Александр Вертинский. «Я не знаю, зачем и кому это нужно…»',
'21 марта 1889 года родился главный русский шансонье XX века, печальный Пьеро, вписавший свою судьбу в '
'историю отечественной культуры',
'Жизнь с самого начала оставляла для Александра Вертинского слишком много вопросов без ответов. Слишком '
'много «пустого» пространства. И он научился заполнять его вымыслом. Создал собственный театр с безумным '
'множеством персонажей, каждый из которых — от сироток-калек и безымянных кокаинеточек до гениальных '
'скрипачей и кинодив — был им самим.',
'Трехкомнатная квартира на последнем этаже дома на углу Тверской и Козицкого переулка в Москве и сегодня '
'выглядит так, словно ее хозяин вот-вот вернется. В просторном кабинете все те же большие книжные шкафы, '
'все тот же гигантский письменный стол с наполеоновским вензелем и бюстом Вольтера.',
'Сейчас в кабинете все чаще бывает лишь вдова Вертинского. Вновь и вновь перечитывает его письма, '
'рукописи. Он смотрит на нее с фотографий, развешанных на стенах, расставленных на столе, и словно '
'возвращает в те пятнадцать лет неизбывного счастья, когда по квартире витает запах табака и лаванды, дом '
'полон гостей и шумные застолья длятся допоздна. И все это — будто здесь и сейчас. Нет, время не '
'остановилось, оно сомкнуло объятия, чтобы вновь и вновь перечитывать эту странную, загадочную судьбу.',
'Считается, что свой голос Георгий Иванов обрёл в эмиграции и благодаря эмиграции. Мол, утрата родины '
'стала для него тем «простым человеческим горем», которого так не хватало по форме безупречным его стихам, '
'чтобы они наполнились содержанием. На самом деле это не совсем так, потому что точка сборки Георгия '
'Иванова была смещена ещё в Петербурге.',
'Георгий Иванов. На грани музыки и сна',
'Первое детское воспоминание Вертинского — о смерти матери. Трехлетний Саша сидит на горшке и выковыривает '
'глаза у плюшевого медвежонка. Горничная Лизка отрывает мальчика от увлекательного занятия: «Вставай, твоя '
'мама умерла!» Мать лежит в серебристом гробу на столе, тело ее скрывают цветы; у изголовья стоят '
'серебряные подсвечники и маленькая табуретка. В руке Саша сжимает шоколадку, он бросается к матери, чтобы '
'угостить. Но мать не раскрывает рта…',
'Через два года от чахотки умер отец. Однажды ранней весной его нашли без чувств на могиле супруги. '
'Оправиться от болезни он уже не смог. Когда кровь хлынула горлом, рядом с ним была только десятилетняя '
'дочь Надя, не знавшая, как помочь. Обессиленный отец упал на подушку и захлебнулся кровью.',
'Старшая сестра матери забрала Надю к себе в Ковно. Саша остался жить в Киеве с другой сестрой матери, '
'которая уверила мальчика в том, что его сестра умерла. То же самое было сказано Наде о брате. Спустя годы '
'Александр случайно обнаружит упоминание о Н. Н. Вертинской в журнале «Театр и искусство», напишет ей, и '
'выяснится, что это его сестра. Во время Первой мировой Вертинскому сообщат, что Надя покончила с собой. '
'Только после смерти Вертинского его вдова выяснит, что Надежда Николаевна живет в Ленинграде.',
'Смерть причудливо и неотвратимо вписалась в его жизнь. Смерть была тем миром, где кончались тщета '
'мальчика Мая и тревоги Безноженьки и наступал долгожданный покой.',
'Александр Вертинский появился на свет «незаконнорожденным». Родственники отца и матери не одобряли союз '
'Николая Вертинского с Евгенией Скалацкой (Сколацкой) даже тогда, когда родились Надя и Саша. Евгения '
'Степановна происходила из дворянского рода, а Николай Петрович был присяжным поверенным. Первая жена отца '
'по настоянию родственников Николая Вертинского не давала ему развода. Так что пришлось усыновить '
'собственных детей.',
'Жизнь с самого начала оставляла для Александра Вертинского слишком много вопросов без ответов. Слишком '
'много «пустого» пространства. И он научился заполнять его вымыслом. Создал собственный театр с безумным '
'множеством персонажей, каждый из которых — от сироток-калек и безымянных кокаинеточек до гениальных '
'скрипачей и кинодив — был им самим.',
'Театр стал маниакальной страстью Вертинского еще с гимназических лет. Он любыми способами проникал на '
'спектакли, оперы, концерты, выступал в любительских постановках в контрактовом зале на киевском Подоле и '
'подвизался статистом в Соловцовском театре — разумеется, бесплатно. А чтобы не умереть с голоду, брался '
'за любую работу — пописывал рецензии на выступления гастролеров, служил корректором в типографии, '
'нанимался помощником бухгалтера в гостиницу, продавал открытки, грузил арбузы на барках и даже '
'подворовывал у двоюродной сестры безделушки, чтобы сбыть их на толкучке.',
'С армией Колчака бежала из Владивостока семья цыган Димитриевичей, на пароходах генерала Врангеля '
'спасались Александр Вертинский и Надежда Плевицкая, уходили куда угодно, лишь бы подальше от Советов, '
'многие звёзды и звёздочки... Да, в первой эмиграции оказалось немало творческих личностей, работавших в '
'интересующем нас жанре русской песни, но даже самые яркие их имена блекнут рядом со сверкающей снежной '
'шапкой Монблана в лице Фёдора Ивановича Шаляпина.',
'Живой бог русской музыки',
'В 1911–1912 годах журналы «Киевская неделя» и «Лукоморье» опубликовали первые рассказы Вертинского: '
'«Красные бабочки» и «Моя невеста» — декадентские, но с бунинской интонацией. «Красные бабочки» — о '
'мальчике-сироте, случайно погубившем красных бабочек, вышитых на черном платье. Мальчик наказан суровой '
'теткой, но бабочки являются ему во сне, чтобы отомстить за погибших сестер. «Моя невеста» — о сумасшедшей '
'бездомной, читающей стихи на эстраде опустевшего осеннего парка. Эта «светлая малютка-невеста» при '
'ближайшем рассмотрении оказывается «маленьким уродливым существом» с «длинным, острым, серо-зеленого '
'цвета лицом», «черно-синими припухшими губами», «без бровей, без ресниц, с глубоко вдавленными в череп '
'глазами».',
'Свободное от литературных посиделок и работы время Вертинский коротал с киевской богемной молодежью в '
'подвальном кабачке, закусывая дешевое вино дешевым сыром. В приобретенном на толкучке подержанном фраке, '
'всегда с живым цветком в петлице, всегда презрительный и надменный, он сыпал заранее продуманными '
'афоризмами и производил на окружающих впечатление большого оригинала. Но прекрасно понимал, что вечно так '
'продолжаться не может.',
'Скопив 25 рублей и подыскав компаньона с театральным гардеробчиком (без собственных костюмов в театрах '
'тогда статистов не брали), Вертинский подался в Москву.',
'Здесь он играл небольшие роли в любительских студиях, поступил в театр миниатюр Марьи Арцыбушевой, где '
'служил за котлеты и борщ, соглашался на любые роли в кино, показывался во МХАТе — но из-за своего '
'грассирующего «р» был отвергнут Станиславским.',
'А внутри бурлило и клокотало, требовало выхода и не находило его. Слишком много вокруг было никому '
'неизвестных талантов и знаменитых бездарностей. Столицы захлестнула эпидемия увлечения кокаином. Его '
'покупали сначала в аптеках, затем с рук, носили в пудреницах и портсигарах, щедро одалживали и '
'одалживались. Однажды выглянув из выходившего на крышу окна мансарды, которую Вертинский снимал, он '
'обнаружил, что весь скат усеян пустыми коричневыми бутылочками из-под кокаина.',
'Вертинский отправился к психиатру, профессору Баженову, и, подойдя к трамвайной остановке, увидел, как '
'Пушкин сошел со своего пьедестала, оставив на нем четкий след. Александр Сергеевич сел вместе с '
'Вертинским в трамвай и достал большой старинный медный пятак — для оплаты.',
'Справиться с пристрастием к кокаину Вертинскому помогла война. Под именем Брат Пьеро он записался в '
'санитарный поезд, курсировавший от Москвы к фронту и обратно. Почти два года Вертинский перевязывал '
'раненых, читал им письма от родных, пел и даже, по его уверению, оперировал.',
'В 1915 году Вертинский вернулся в театр миниатюр Арцыбушевой с собственным номером — «Ариетки Пьеро». На '
'фоне черного занавеса в лунном луче прожектора на сцене появлялся высокий молодой человек. На его густо '
'покрытом белилами лице резко выделялись ярко-красный рот, обведенные тушью большие глаза и печально '
'вздернутые нарисованные брови. После вступления рояля этот странный юноша взмахивал руками и тихо '
'начинал:',
'Я люблю Вас, моя сегоглазочка, Золотая ошибка моя! Вы — вечегняя жуткая сказочка, Вы — цветок из кагтины '
'Гойя.',
'После бесконечных ямщиков и соловьев, аллей и ночей, дышащих сладострастьем, с одной стороны, а с другой '
'с другой — на фоне бравад футуристов, претенциозных поэз Игоря Северянина и одесской шансоньетки Изы '
'Кремер с ее занзибарами-кларами, — печальный Пьеро Вертинского стал сенсацией. Ему удалось невозможное: '
'вписать богемную экзотику — всех этих маленьких креольчиков, смуглых принцев с Антильских островов, '
'китайчат Ли, лиловых негров — в живописный ландшафт одинокой и беззащитной души; превратить ироничную '
'игру культурными символами в откровение глубокой печали.',
'Так певец без выдающихся вокальных данных, композитор, не знавший нотной грамоты, актер с дефектом дикции '
'стал всероссийским кумиром. Издательство «Прогрессивные новости» Б. Андржеевского огромными тиражами '
'выпускало «Песенки Вертинского», которые впечатлительные курсистки развозили по всей стране.',
'Начались гастроли и бенефисы, от восторженной и возмущенной публики нередко приходилось спасаться через '
'черный ход. Посыпались приглашения в кино. Популярность Вертинского была столь велика, что в феврале 1917 '
'года Александра Керенского называли «печальным Пьеро российской революции».',
'Как и подавляющее большинство представителей русской интеллигенции, Вертинский связывал с Февральской '
'революцией опьяняющие надежды на обновление и очищение. Октябрьский переворот заставил протрезветь. Под '
'впечатлением гибели московских юнкеров, убитых большевиками, Вертинский написал знаменитых «Юнкеров»:',
'Я не знаю, зачем и кому это нужно, Кто послал их на смерть недрожавшей рукой, Только так беспощадно, так '
'зло и ненужно Опустили их в вечный покой.',
'Песня стала настоящим белогвардейским гимном — с нею шли в бой и умирали русские офицеры и юнкера. '
'Существует легенда, что Вертинского вызывали в ЧК для дачи объяснений по поводу контрреволюционной песни. '
'Артист возмутился: «Но вы же не можете запретить мне их жалеть!» И в ответ услышал: «Дышать запретим, '
'если потребуется».',
'Как и многие эпизоды из жизни Вертинского, допрос в ЧК не имеет документальных подтверждений. Тем не '
'менее факт остается фактом: вслед за отступающей белой армией, как и многие российские артисты, '
'Вертинский подался на юг, где все еще верили в счастливую развязку и мучились тяжелым предчувствием, что '
'ее никогда не будет.',
'В 1920 году на пароходе «Великий князь Александр Михайлович», увозящем барона Врангеля, Вертинский '
'покинул Россию, отправившись в добровольное изгнание на 23 года.',
'Его одиссея началась с Константинополя, где он пел разноязыким эмигрантам цыганские романсы и раздобыл '
'греческий паспорт на имя Александра Вертидиса. Закружилась круговерть авантюр, лиц, городов, стран. '
'Румыния, Польша, Германия, Австрия, Венгрия, Палестина, Египет, Ливия, Франция, США… Выступления в '
'ресторанах и кабаках — между горячим и десертом; в мюзик-холлах и фешенебельных отелях — для королей '
'Густава Шведского, Альфонса Испанского, принца Уэльского, для Вандербильтов и Ротшильдов.',
'В Бессарабии его арестовали по обвинению в просоветской пропаганде песней «В степи молдаванской» — в '
'особенности строками «О, как сладко, как больно сквозь слезы / Хоть взглянуть на родную страну…» '
'Естественно, в деятельности Вертинского усмотрели происки НКВД. С тех пор слава чекистского агента '
'бросает тень на его репутацию по сей день — как будто агент НКВД не может быть великим артистом…',
'Все двадцать с лишним лет, где бы Вертинский ни выступал, он пел только на русском (исключение делал лишь '
'для любимой Франции, где исполнял несколько своих песенок по-французски). Его основной аудиторией, '
'конечно же, была русская эмиграция, для которой печальный Пьеро являлся не просто символом утраченной '
'России, но, по выражению Шаляпина, «сказителем земли русской».',
'Уже с начала 1920-х Вертинский просил разрешения вернуться — через советское консульство, через Анатолия '
'Луначарского, возглавившего советскую делегацию в Берлине, — но неизменно получал отказ.',
'В конце 1935 года он приехал в Китай — в Шанхае и Харбине была довольно обширная русская община. В Шанхае '
'артист дал двадцать аншлаговых концертов (даже Шаляпину здесь сумели организовать только два '
'выступления), однако бесконечно петь для одной и той же аудитории невозможно, и Вертинский намеревался '
'через какое-то время вернуться в Европу. Но в 1937 году его вдруг пригласили в СССР — без всяких просьб '
'со стороны артиста. Вертинский остался в Китае, ожидая, когда организуют возвращение. Он ждал пять лет.',
'Что побудило Сталина позвать Вертинского? Рассказывали, что генералиссимус любил слушать ариетки Брата '
'Пьеро в часы отдыха — особенно песню «В синем и далеком океане». Легенда приписывает также Сталину '
'известную фразу «Дадим артисту Вертинскому спокойно дожить на Родине», произнесенную после того, как '
'«отец всех народов» лично вычеркнул артиста из ждановского постановления, громившего Дмитрия Шостаковича '
'и Сергея Прокофьева. Нравился Сталину Вертинский или нет, несомненно одно — возвращение «соловья '
'белоэмиграции», мировой знаменитости было идеологически выгодно советскому режиму, тем более в 1943 году, '
'когда открылся союзный фронт и в стране бродили оттепельные настроения.',
'Вертинский же всегда и всем говорил о том, что возвращается, чтобы «рассказать о страданиях эмиграции» и '
'«помирить Родину с ней». «Шанхайская Тэффи» Наталия Ильина не преминула по этому поводу съязвить в '
'автобиографическом романе «Возвращение». Ее Джордж Эрмин (Георгий Еремин), подозрительно похожий на '
'Вертинского, прочитав Конституцию СССР, перекрестился и изрек: «Я подумал, что же это — Китеж, '
'воскресающий без нас!»',
'Ранним утром 4 ноября 1943 года на пароходе «Дайрен-Мару» Вертинский покинул Шанхай. С ним были его '
'двадцатилетняя жена Лидия и ее мать, на руках он держал трехмесячную дочь Марианну. Необходимость '
'содержать семью была не самой последней причиной переезда в СССР. Шла война, зверствовала инфляция, '
'иностранные конторы в Китае закрывались, русские эмигранты спасались от японской оккупации. Выступать '
'становилось все труднее. Вертинский пускался в рискованные финансовые авантюры, не имевшие успеха. Его '
'самой удачной коммерческой операцией была закупка пяти бутылей водки накануне рождения ребенка. Продав '
'их после повышения цен, Вертинский оплатил счета за услуги роддома.',
'Первым советским городом на их пути стала Чита. Стоял жуткий мороз, семью Вертинского поселили в '
'гостинице, где практически не топили, а по стенам ползали клопы. А в местной филармонии артиста уже '
'поджидала телеграмма из Москвы с распоряжением дать в Чите несколько концертов. Родина встречала блудного '
'сына.',
'О его возвращении ходили анекдоты. В одном из них рассказывалось, как Вертинский, приехав в СССР, выходит '
'из вагона с двумя чемоданами, ставит их, целует землю и смотрит вокруг: «Не узнаю тебя, Россия!» '
'Обернувшись, обнаруживает, что чемоданов нет. «Узнаю тебя, Россия!» — восклицает артист. В другом '
'повествовалось о приеме, устроенном в честь Вертинского «пролетарским графом» Алексеем Николаевичем '
'Толстым. Гости долго томятся, ожидая, когда их пригласят к столу. Кто-то из присутствующих, оглядев '
'собравшееся общество — граф Толстой, граф Игнатьев, митрополит Николай Крутицкий, Александр Вертинский, —'
' спрашивает: «Кого ждем?» Остроумец-куплетист Смирнов-Сокольский отвечает: «Государя!»',
'Первой советской киноролью Вертинского стал кардинал Бирнч в фильме Михаила Калатозова «Заговор '
'обреченных». Актер сыграл изысканного, сладкоречивого патриция со следами былого донжуанства. Так и '
'должен выглядеть настоящий враг советского режима — образованный, воспитанный, обвораживающий своим '
'лоском. Только такие и могут строить заговоры и вынашивать планы государственного переворота. Сталинская '
'премия за роль кардинала свидетельствовала о высочайшем одобрении этой трактовки.',
'Такого же двуликого Януса Вертинский исполнил в помпезном фильме Сергея Юткевича «Великий воин '
'Скандербег». Возможно, он играл бы маскирующихся иродов и дальше, если бы Исидор Анненский не предложил '
'ему роль князя в экранизации чеховской «Анны на шее». Одним своим появлением на экране Вертинский, этот '
'обломок царской России, воскрешал шик дворянских собраний и балов при дворе.',
'Положение «советского артиста» Вертинского было довольно странным. С одной стороны, явное благоволение '
'властей: его с семьей поселили в «Метрополе», затем выделили квартиру, наградили высшей государственной '
'премией. Правда, семья в течение трех лет обитала в «Метрополе» не от хорошей жизни. Съехать было просто '
'некуда, потому что выделенная квартира находилась на первом этаже двухэтажного дома на Хорошевском шоссе. '
'Артист опасался поселяться в ней и с помощью сложных маневров обменял ее на квартиру на улице Горького, '
'которая была в таком жутком состоянии, что нуждалась в капитальном ремонте. Опасения Вертинского, как '
'выяснилось позже, были не напрасны — квартира на Хорошевском шоссе подверглась налету знаменитой «Черной '
'кошки».',
'С другой стороны, из ста с лишним песен к исполнению было разрешено не более тридцати (авторство текстов '
'Георгия Иванова и Николая Гумилева Вертинскому пришлось приписать себе), единственная прижизненная '
'пластинка вышла в 1944 году, о концертах — ни строчки в прессе. «Я существую на правах публичного дома, —'
' горько шутил Вертинский, — все ходят, но в обществе говорить об этом не принято».',
'Из эмиграции Вертинский вернулся практически с пустыми карманами, вскоре родилась вторая дочь, Настя. '
'Гастрольбюро обеспечило артисту по 20–25 концертов в месяц по всей стране от Средней Азии до Дальнего '
'Востока — в нетопленных, неприспособленных для выступлений залах с расстроенными роялями и пьяной '
'публикой. Но концертная жизнь в европейских кабаках приучила его работать в любых условиях.',
'Платили Вертинскому по самому низкому тарифу, поскольку у него не было никаких званий. За концерт артист '
'получал около 800 рублей, при этом его выступления всегда проходили при аншлагах и собирали десятки тысяч '
'рублей. Приходилось соглашаться на все, давать левые концерты, выкручиваться, объясняться… Вместе с '
'аккомпаниатором Михаилом Брохесом он вдоль и поперек исколесил всю страну по нескольку раз, дав около '
'трех тысяч концертов. Написал два десятка стихов, работал над мемуарами, которые не успел закончить. 14 '
'лет на Родине превратили бодрого, моложавого мужчину в глубокого старика.',
'Он не хотел умереть дома, не желал, чтобы родные видели «кухню смерти». 21 мая 1957 года Вертинский '
'готовился к концерту в Ленинграде, был сдержан и немногословен. Он находился в своем 208-м номере '
'«Астории», когда начался сердечный приступ. Лекарства под рукой не оказалось. Как выяснилось позже — оно '
'бы уже не помогло. При вскрытии сосуды рассыпались, как хрупкое стекло',
'Назначен куратор строительства российской Кремниевой долины',
'Дмитрий Медведев доверил пост руководителя иннограда миллиардеру Виктору Вексельбергу.',
'Всё меньше остаётся нерешённых вопросов, касающихся возведения в России уникального Центра по разработке '
'и коммерциализации новых технологий. Власти уже не только выбрали площадку для строительства '
'отечественной Кремниевой долины в подмосковном Сколково, а также частично одобрили концепцию наукограда, '
'но и определили куратора большой инновационной стройки. «Были проведены определённые консультации по '
'поводу того, кто конкретно мог бы осуществлять такого рода работу. Мною принято решение, что российскую '
'часть этой координирующей структуры, которую мы создадим, возглавит Виктор Феликсович Вексельберг», — '
'цитирует «Взгляд» Дмитрия Медведева.',
'Исходя из заявления президента, понятно, что у проекта будут не только российские инвесторы, но и '
'иностранные партнёры, в числе которых, по словам главы государства, будут и представители иностранных '
'научных кругов. Именно на базе взаимодействия науки и бизнеса должен появиться и работать инноград. «Всё '
'это затеяли не ради того, чтобы построить определённое количество коттеджей или же создать там нормальные '
'производственные условия, лаборатории. Это всё важно, но это всё инфраструктура. Самое главное, чтобы '
'там появились люди. Для того чтобы люди появились, должна быть внятная система управления. Эта система '
'управления зависит от нас. Я думаю, что с учётом масштабности этого проекта, а с другой стороны, того, '
'что в реализации этого проекта должны быть заинтересованы не только государственные структуры, но, '
'прежде всего, российский бизнес, я считаю, что координацией российский бизнес и мог бы заняться», — '
'заявил Дмитрий Медведев.',
'Это выступление президента вполне объясняет выбор руководителя проекта. Виктор Вексельберг — бизнесмен с '
'30-летним стажем, капитал которого оценивается в 6,4 млрд долларов. Вексельберг является главой правления '
'ОАО «Тюменская нефтяная компания» (ТНК) и президентом ЗАО «Ренова». Именно он является владельцем '
'значительной части российского титана и алюминиевого бизнеса.',
'О том, почему площадкой для строительства Кремниевой долины выбрано Подмосковье, читайте в статье '
'Частного корреспондента «Сколково назначили Кремниевой долиной»'
]
y_true = [
{
"PERSON": [(0, 20)]
},
{
"PERSON": [(71, 76)]
},
{
"PERSON": [(36, 58)]
},
{
"LOCATION": [(55, 63), (66, 84), (87, 93)],
"PERSON": [(281, 289)]
},
{
"PERSON": [(45, 56)]
},
{
"LOCATION": [(334, 344)],
"PERSON": [(26, 40), (299, 314)]
},
{
"PERSON": [(0, 14)]
},
{
"PERSON": [(28, 39), (70, 74), (146, 151), (362, 366)]
},
{
"PERSON": [(207, 211)]
},
{
"PERSON": [(30, 34), (51, 55), (182, 186), (208, 217), (250, 266), (367, 378), (392, 396), (436, 447),
(471, 489)],
"LOCATION": [(44, 49), (71, 76), (498, 508)],
"ORG": [(269, 295)]
},
{
"PERSON": [(107, 110), (121, 132)]
},
{
"PERSON": [(0, 20), (104, 123), (126, 144), (146, 155), (184, 188), (191, 195), (197, 215), (251, 267),
(338, 357)]
},
{
"PERSON": [(36, 58)]
},
{
"PERSON": [(33, 44)],
"LOCATION": [(168, 185), (189, 197), (198, 204)],
"ORG": [(230, 249)]
},
{
"PERSON": [(9, 16), (89, 97), (108, 128), (131, 148), (430, 455)],
"LOCATION": [(27, 39), (191, 198), (414, 422)],
"ORG": [(2, 16)]
},
dict(),
{
"PERSON": [(87, 98)],
"ORG": [(18, 56)]
},
{
"LOCATION": [(72, 80)],
"PERSON": [(51, 61)]
},
{
"LOCATION": [(151, 157)],
"PERSON": [(130, 140)]
},
{
"PERSON": [(80, 97), (233, 246)],
"ORG": [(65, 97), (177, 182)]
},
{
"PERSON": [(373, 383)]
},
{
"PERSON": [(0, 10), (46, 54), (103, 109), (166, 185), (199, 209)]
},
{
"LOCATION": [(135, 141)],
"PERSON": [(36, 47), (79, 84), (177, 187)]
},
{
"PERSON": [(12, 22), (49, 60), (94, 99)],
"ORG": [(34, 60)]
},
{
"PERSON": [(105, 109)]
},
{
"LOCATION": [(389, 408)],
"PERSON": [(162, 178), (202, 212), (251, 256), (257, 268)]
},
{
"PERSON": [(171, 187), (226, 237)],
"ORG": [(134, 169)]
},
{
"PERSON": [(160, 171), (215, 236), (257, 262)]
},
{
"PERSON": [(68, 78), (267, 277)]
},
dict(),
{
"PERSON": [(123, 134)],
"ORG": [(146, 148)]
},
{
"PERSON": [(30, 41), (197, 207)],
"ORG": [(52, 54)]
},
{
"LOCATION": [(107, 113)],
"PERSON": [(39, 59), (78, 86), (88, 98)]
},
{
"LOCATION": [(23, 38), (203, 210), (212, 218), (220, 228), (230, 237), (239, 246), (248, 257),
(259, 265), (267, 272), (274, 281), (283, 286)],
"PERSON": [(128, 148), (403, 420), (422, 441), (450, 459)]
},
{
"PERSON": [(226, 237)],
"LOCATION": [(2, 12)],
"ORG": [(256, 260), (357, 361)]
},
{
"LOCATION": [(118, 125), (307, 313)],
"PERSON": [(34, 44), (263, 268), (332, 340)]
},
{
"PERSON": [(20, 30), (96, 117)],
"LOCATION": [(155, 162)],
"ORG": [(67, 88), (133, 152)]
},
{
"LOCATION": [(31, 36), (41, 47), (50, 57), (99, 105), (335, 341), (381, 385), (447, 452)],
"PERSON": [(153, 161), (279, 289), (426, 436)]
},
{
"PERSON": [(13, 20), (29, 40), (103, 108), (194, 201), (233, 244), (388, 407), (410, 427), (438, 445),
(446, 456)]
},
{
"LOCATION": [(338, 342), (392, 397)],
"PERSON": [(0, 10), (142, 147), (149, 163), (248, 260), (262, 276), (304, 315)]
},
{
"LOCATION": [(45, 51), (52, 56), (77, 83), (258, 262), (320, 325)],
"PERSON": [(58, 68), (120, 125), (174, 182), (425, 435), (630, 640)]
},
{
"LOCATION": [(42, 46), (221, 227), (251, 255)],
"PERSON": [(74, 85)]
},
{
"LOCATION": [(92, 96), (194, 200), (262, 268)],
"PERSON": [(70, 80), (345, 356), (379, 408), (529, 536), (543, 551), (564, 581), (583, 603), (652, 670)]
},
{
"PERSON": [(27, 38), (53, 58), (68, 86)]
},
{
"LOCATION": [(319, 325)],
"PERSON": [(20, 25), (26, 36), (65, 80), (95, 105), (169, 185), (239, 243), (286, 296)]
},
{
"PERSON": [(31, 42), (607, 618)],
"LOCATION": [(137, 146), (260, 269), (399, 416), (506, 520), (673, 690)],
"ORG": [(722, 734)]
},
{
"PERSON": [(105, 120), (123, 139), (140, 151), (323, 333)]
},
{
"PERSON": [(13, 23), (95, 100)],
"LOCATION": [(179, 191), (195, 211)],
"ORG": [(102, 114)]
},
{
"PERSON": [(8, 19), (327, 344)]
},
{
"LOCATION": [(123, 133), (199, 206)],
"PERSON": [(89, 99)]
},
{
"LOCATION": [(31, 59)]
},
{
"LOCATION": [(43, 52)],
"PERSON": [(0, 16), (65, 85)]
},
{
"ORG": [(84, 140), (620, 626)],
"LOCATION": [(65, 71), (212, 229), (232, 253), (291, 301)],
"PERSON": [(576, 605), (628, 645)]
},
{
"LOCATION": [(153, 164), (290, 298)],
"PERSON": [(925, 941)]
},
{
"ORG": [(201, 243), (246, 249), (265, 276)],
"PERSON": [(72, 90), (173, 184)]
},
{
"LOCATION": [(42, 59), (68, 79), (123, 131), (142, 160)],
"ORG": [(98, 121)]
}
]
X_loaded, y_loaded = load_dataset(file_name)
self.assertIsInstance(X_loaded, list)
self.assertIsInstance(y_loaded, list)
self.assertEqual(len(X_true), len(X_loaded))
self.assertEqual(len(y_true), len(y_loaded))
for sample_idx in range(len(X_true)):
self.assertEqual(X_true[sample_idx], X_loaded[sample_idx])
self.assertIsInstance(y_loaded[sample_idx], dict)
self.assertEqual(set(y_true[sample_idx]), set(y_loaded[sample_idx]))
for ne_type in y_true[sample_idx]:
self.assertIsInstance(y_loaded[sample_idx][ne_type], list)
self.assertEqual(len(y_true[sample_idx][ne_type]), len(y_loaded[sample_idx][ne_type]))
for entity_idx in range(len(y_true[sample_idx][ne_type])):
self.assertEqual(y_true[sample_idx][ne_type][entity_idx], y_loaded[sample_idx][ne_type][entity_idx])
def test_load_dataset_positive02(self):
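"""The same corpus saved without paragraph markup must load to exactly the
same texts and annotations as in positive01."""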
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
file_name = os.path.join(base_dir, 'dataset_without_paragraphs.json')
X_true = [
'Александр Вертинский. «Я не знаю, зачем и кому это нужно…»',
'21 марта 1889 года родился главный русский шансонье XX века, печальный Пьеро, вписавший свою судьбу в '
'историю отечественной культуры',
'Жизнь с самого начала оставляла для Александра Вертинского слишком много вопросов без ответов. Слишком '
'много «пустого» пространства. И он научился заполнять его вымыслом. Создал собственный театр с безумным '
'множеством персонажей, каждый из которых — от сироток-калек и безымянных кокаинеточек до гениальных '
'скрипачей и кинодив — был им самим.',
'Трехкомнатная квартира на последнем этаже дома на углу Тверской и Козицкого переулка в Москве и сегодня '
'выглядит так, словно ее хозяин вот-вот вернется. В просторном кабинете все те же большие книжные шкафы, '
'все тот же гигантский письменный стол с наполеоновским вензелем и бюстом Вольтера.',
'Сейчас в кабинете все чаще бывает лишь вдова Вертинского. Вновь и вновь перечитывает его письма, '
'рукописи. Он смотрит на нее с фотографий, развешанных на стенах, расставленных на столе, и словно '
'возвращает в те пятнадцать лет неизбывного счастья, когда по квартире витает запах табака и лаванды, дом '
'полон гостей и шумные застолья длятся допоздна. И все это — будто здесь и сейчас. Нет, время не '
'остановилось, оно сомкнуло объятия, чтобы вновь и вновь перечитывать эту странную, загадочную судьбу.',
'Считается, что свой голос Георгий Иванов обрёл в эмиграции и благодаря эмиграции. Мол, утрата родины '
'стала для него тем «простым человеческим горем», которого так не хватало по форме безупречным его стихам, '
'чтобы они наполнились содержанием. На самом деле это не совсем так, потому что точка сборки Георгия '
'Иванова была смещена ещё в Петербурге.',
'Георгий Иванов. На грани музыки и сна',
'Первое детское воспоминание Вертинского — о смерти матери. Трехлетний Саша сидит на горшке и выковыривает '
'глаза у плюшевого медвежонка. Горничная Лизка отрывает мальчика от увлекательного занятия: «Вставай, твоя '
'мама умерла!» Мать лежит в серебристом гробу на столе, тело ее скрывают цветы; у изголовья стоят '
'серебряные подсвечники и маленькая табуретка. В руке Саша сжимает шоколадку, он бросается к матери, чтобы '
'угостить. Но мать не раскрывает рта…',
'Через два года от чахотки умер отец. Однажды ранней весной его нашли без чувств на могиле супруги. '
'Оправиться от болезни он уже не смог. Когда кровь хлынула горлом, рядом с ним была только десятилетняя '
'дочь Надя, не знавшая, как помочь. Обессиленный отец упал на подушку и захлебнулся кровью.',
'Старшая сестра матери забрала Надю к себе в Ковно. Саша остался жить в Киеве с другой сестрой матери, '
'которая уверила мальчика в том, что его сестра умерла. То же самое было сказано Наде о брате. Спустя годы '
'Александр случайно обнаружит упоминание о Н. Н. Вертинской в журнале «Театр и искусство», напишет ей, и '
'выяснится, что это его сестра. Во время Первой мировой Вертинскому сообщат, что Надя покончила с собой. '
'Только после смерти Вертинского его вдова выяснит, что Надежда Николаевна живет в Ленинграде.',
'Смерть причудливо и неотвратимо вписалась в его жизнь. Смерть была тем миром, где кончались тщета '
'мальчика Мая и тревоги Безноженьки и наступал долгожданный покой.',
'Александр Вертинский появился на свет «незаконнорожденным». Родственники отца и матери не одобряли союз '
'Николая Вертинского с Евгенией Скалацкой (Сколацкой) даже тогда, когда родились Надя и Саша. Евгения '
'Степановна происходила из дворянского рода, а Николай Петрович был присяжным поверенным. Первая жена отца '
'по настоянию родственников Николая Вертинского не давала ему развода. Так что пришлось усыновить '
'собственных детей.',
'Жизнь с самого начала оставляла для Александра Вертинского слишком много вопросов без ответов. Слишком '
'много «пустого» пространства. И он научился заполнять его вымыслом. Создал собственный театр с безумным '
'множеством персонажей, каждый из которых — от сироток-калек и безымянных кокаинеточек до гениальных '
'скрипачей и кинодив — был им самим.',
'Театр стал маниакальной страстью Вертинского еще с гимназических лет. Он любыми способами проникал на '
'спектакли, оперы, концерты, выступал в любительских постановках в контрактовом зале на киевском Подоле и '
'подвизался статистом в Соловцовском театре — разумеется, бесплатно. А чтобы не умереть с голоду, брался '
'за любую работу — пописывал рецензии на выступления гастролеров, служил корректором в типографии, '
'нанимался помощником бухгалтера в гостиницу, продавал открытки, грузил арбузы на барках и даже '
'подворовывал у двоюродной сестры безделушки, чтобы сбыть их на толкучке.',
'С армией Колчака бежала из Владивостока семья цыган Димитриевичей, на пароходах генерала Врангеля '
'спасались Александр Вертинский и Надежда Плевицкая, уходили куда угодно, лишь бы подальше от Советов, '
'многие звёзды и звёздочки... Да, в первой эмиграции оказалось немало творческих личностей, работавших в '
'интересующем нас жанре русской песни, но даже самые яркие их имена блекнут рядом со сверкающей снежной '
'шапкой Монблана в лице Фёдора Ивановича Шаляпина.',
'Живой бог русской музыки',
'В 1911–1912 годах журналы «Киевская неделя» и «Лукоморье» опубликовали первые рассказы Вертинского: '
'«Красные бабочки» и «Моя невеста» — декадентские, но с бунинской интонацией. «Красные бабочки» — о '
'мальчике-сироте, случайно погубившем красных бабочек, вышитых на черном платье. Мальчик наказан суровой '
'теткой, но бабочки являются ему во сне, чтобы отомстить за погибших сестер. «Моя невеста» — о сумасшедшей '
'бездомной, читающей стихи на эстраде опустевшего осеннего парка. Эта «светлая малютка-невеста» при '
'ближайшем рассмотрении оказывается «маленьким уродливым существом» с «длинным, острым, серо-зеленого '
'цвета лицом», «черно-синими припухшими губами», «без бровей, без ресниц, с глубоко вдавленными в череп '
'глазами».',
'Свободное от литературных посиделок и работы время Вертинский коротал с киевской богемной молодежью в '
'подвальном кабачке, закусывая дешевое вино дешевым сыром. В приобретенном на толкучке подержанном фраке, '
'всегда с живым цветком в петлице, всегда презрительный и надменный, он сыпал заранее продуманными '
'афоризмами и производил на окружающих впечатление большого оригинала. Но прекрасно понимал, что вечно так '
'продолжаться не может.',
'Скопив 25 рублей и подыскав компаньона с театральным гардеробчиком (без собственных костюмов в театрах '
'тогда статистов не брали), Вертинский подался в Москву.',
'Здесь он играл небольшие роли в любительских студиях, поступил в театр миниатюр Марьи Арцыбушевой, где '
'служил за котлеты и борщ, соглашался на любые роли в кино, показывался во МХАТе — но из-за своего '
'грассирующего «р» был отвергнут Станиславским.',
'А внутри бурлило и клокотало, требовало выхода и не находило его. Слишком много вокруг было никому '
'неизвестных талантов и знаменитых бездарностей. Столицы захлестнула эпидемия увлечения кокаином. Его '
'покупали сначала в аптеках, затем с рук, носили в пудреницах и портсигарах, щедро одалживали и '
'одалживались. Однажды выглянув из выходившего на крышу окна мансарды, которую Вертинский снимал, он '
'обнаружил, что весь скат усеян пустыми коричневыми бутылочками из-под кокаина.',
'Вертинский отправился к психиатру, профессору Баженову, и, подойдя к трамвайной остановке, увидел, как '
'Пушкин сошел со своего пьедестала, оставив на нем четкий след. Александр Сергеевич сел вместе с '
'Вертинским в трамвай и достал большой старинный медный пятак — для оплаты.',
'Справиться с пристрастием к кокаину Вертинскому помогла война. Под именем Брат Пьеро он записался в '
'санитарный поезд, курсировавший от Москвы к фронту и обратно. Почти два года Вертинский перевязывал '
'раненых, читал им письма от родных, пел и даже, по его уверению, оперировал.',
'В 1915 году Вертинский вернулся в театр миниатюр Арцыбушевой с собственным номером — «Ариетки Пьеро». На '
'фоне черного занавеса в лунном луче прожектора на сцене появлялся высокий молодой человек. На его густо '
'покрытом белилами лице резко выделялись ярко-красный рот, обведенные тушью большие глаза и печально '
'вздернутые нарисованные брови. После вступления рояля этот странный юноша взмахивал руками и тихо '
'начинал:',
'Я люблю Вас, моя сегоглазочка, Золотая ошибка моя! Вы — вечегняя жуткая сказочка, Вы — цветок из кагтины '
'Гойя.',
'После бесконечных ямщиков и соловьев, аллей и ночей, дышащих сладострастьем, с одной стороны, а с другой '
'с другой — на фоне бравад футуристов, претенциозных поэз Игоря Северянина и одесской шансоньетки Изы '
'Кремер с ее занзибарами-кларами, — печальный Пьеро Вертинского стал сенсацией. Ему удалось невозможное: '
'вписать богемную экзотику — всех этих маленьких креольчиков, смуглых принцев с Антильских островов, '
'китайчат Ли, лиловых негров — в живописный ландшафт одинокой и беззащитной души; превратить ироничную '
'игру культурными символами в откровение глубокой печали.',
'Так певец без выдающихся вокальных данных, композитор, не знавший нотной грамоты, актер с дефектом дикции '
'стал всероссийским кумиром. Издательство «Прогрессивные новости» Б. Андржеевского огромными тиражами '
'выпускало «Песенки Вертинского», которые впечатлительные курсистки развозили по всей стране.',
'Начались гастроли и бенефисы, от восторженной и возмущенной публики нередко приходилось спасаться через '
'черный ход. Посыпались приглашения в кино. Популярность Вертинского была столь велика, что в феврале 1917 '
'года Александра Керенского называли «печальным Пьеро российской революции».',
'Как и подавляющее большинство представителей русской интеллигенции, Вертинский связывал с Февральской '
'революцией опьяняющие надежды на обновление и очищение. Октябрьский переворот заставил протрезветь. Под '
'впечатлением гибели московских юнкеров, убитых большевиками, Вертинский написал знаменитых «Юнкеров»:',
'Я не знаю, зачем и кому это нужно, Кто послал их на смерть недрожавшей рукой, Только так беспощадно, так '
'зло и ненужно Опустили их в вечный покой.',
'Песня стала настоящим белогвардейским гимном — с нею шли в бой и умирали русские офицеры и юнкера. '
'Существует легенда, что Вертинского вызывали в ЧК для дачи объяснений по поводу контрреволюционной песни. '
'Артист возмутился: «Но вы же не можете запретить мне их жалеть!» И в ответ услышал: «Дышать запретим, '
'если потребуется».',
'Как и многие эпизоды из жизни Вертинского, допрос в ЧК не имеет документальных подтверждений. Тем не '
'менее факт остается фактом: вслед за отступающей белой армией, как и многие российские артисты, '
'Вертинский подался на юг, где все еще верили в счастливую развязку и мучились тяжелым предчувствием, что '
'ее никогда не будет.',
'В 1920 году на пароходе «Великий князь Александр Михайлович», увозящем барона Врангеля, Вертинский '
'покинул Россию, отправившись в добровольное изгнание на 23 года.',
'Его одиссея началась с Константинополя, где он пел разноязыким эмигрантам цыганские романсы и раздобыл '
'греческий паспорт на имя Александра Вертидиса. Закружилась круговерть авантюр, лиц, городов, стран. '
'Румыния, Польша, Германия, Австрия, Венгрия, Палестина, Египет, Ливия, Франция, США… Выступления в '
'ресторанах и кабаках — между горячим и десертом; в мюзик-холлах и фешенебельных отелях — для королей '
'Густава Шведского, Альфонса Испанского, принца Уэльского, для Вандербильтов и Ротшильдов.',
'В Бессарабии его арестовали по обвинению в просоветской пропаганде песней «В степи молдаванской» — в '
'особенности строками «О, как сладко, как больно сквозь слезы / Хоть взглянуть на родную страну…» '
'Естественно, в деятельности Вертинского усмотрели происки НКВД. С тех пор слава чекистского агента '
'бросает тень на его репутацию по сей день — как будто агент НКВД не может быть великим артистом…',
'Все двадцать с лишним лет, где бы Вертинский ни выступал, он пел только на русском (исключение делал лишь '
'для любимой Франции, где исполнял несколько своих песенок по-французски). Его основной аудиторией, '
'конечно же, была русская эмиграция, для которой печальный Пьеро являлся не просто символом утраченной '
'России, но, по выражению Шаляпина, «сказителем земли русской».',
'Уже с начала 1920-х Вертинский просил разрешения вернуться — через советское консульство, через Анатолия '
'Луначарского, возглавившего советскую делегацию в Берлине, — но неизменно получал отказ.',
'В конце 1935 года он приехал в Китай — в Шанхае и Харбине была довольно обширная русская община. В Шанхае '
'артист дал двадцать аншлаговых концертов (даже Шаляпину здесь сумели организовать только два '
'выступления), однако бесконечно петь для одной и той же аудитории невозможно, и Вертинский намеревался '
'через какое-то время вернуться в Европу. Но в 1937 году его вдруг пригласили в СССР — без всяких просьб '
'со стороны артиста. Вертинский остался в Китае, ожидая, когда организуют возвращение. Он ждал пять лет.',
'Что побудило Сталина позвать Вертинского? Рассказывали, что генералиссимус любил слушать ариетки Брата '
'Пьеро в часы отдыха — особенно песню «В синем и далеком океане». Легенда приписывает также Сталину '
'известную фразу «Дадим артисту Вертинскому спокойно дожить на Родине», произнесенную после того, как '
'«отец всех народов» лично вычеркнул артиста из ждановского постановления, громившего Дмитрия Шостаковича '
'и Сергея Прокофьева. Нравился Сталину Вертинский или нет, несомненно одно — возвращение «соловья '
'белоэмиграции», мировой знаменитости было идеологически выгодно советскому режиму, тем более в 1943 году, '
'когда открылся союзный фронт и в стране бродили оттепельные настроения.',
'Вертинский же всегда и всем говорил о том, что возвращается, чтобы «рассказать о страданиях эмиграции» и '
'«помирить Родину с ней». «Шанхайская Тэффи» Наталия Ильина не преминула по этому поводу съязвить в '
'автобиографическом романе «Возвращение». Ее Джордж Эрмин (Георгий Еремин), подозрительно похожий на '
'Вертинского, прочитав Конституцию СССР, перекрестился и изрек: «Я подумал, что же это — Китеж, '
'воскресающий без нас!»',
'Ранним утром 4 ноября 1943 года на пароходе «Дайрен-Мару» Вертинский покинул Шанхай. С ним были его '
'двадцатилетняя жена Лидия и ее мать, на руках он держал трехмесячную дочь Марианну. Необходимость '
'содержать семью была не самой последней причиной переезда в СССР. Шла война, зверствовала инфляция, '
'иностранные конторы в Китае закрывались, русские эмигранты спасались от японской оккупации. Выступать '
'становилось все труднее. Вертинский пускался в рискованные финансовые авантюры, не имевшие успеха. Его '
'самой удачной коммерческой операцией была закупка пяти бутылей водки накануне рождения ребенка. Продав '
'их после повышения цен, Вертинский оплатил счета за услуги роддома.',
'Первым советским городом на их пути стала Чита. Стоял жуткий мороз, семью Вертинского поселили в '
'гостинице, где практически не топили, а по стенам ползали клопы. А в местной филармонии артиста уже '
'поджидала телеграмма из Москвы с распоряжением дать в Чите несколько концертов. Родина встречала блудного '
'сына.',
'О его возвращении ходили анекдоты. В одном из них рассказывалось, как Вертинский, приехав в СССР, выходит '
'из вагона с двумя чемоданами, ставит их, целует землю и смотрит вокруг: «Не узнаю тебя, Россия!» '
'Обернувшись, обнаруживает, что чемоданов нет. «Узнаю тебя, Россия!» — восклицает артист. В другом '
'повествовалось о приеме, устроенном в честь Вертинского «пролетарским графом» Алексеем Николаевичем '
'Толстым. Гости долго томятся, ожидая, когда их пригласят к столу. Кто-то из присутствующих, оглядев '
'собравшееся общество — граф Толстой, граф Игнатьев, митрополит Николай Крутицкий, Александр Вертинский, —'
' спрашивает: «Кого ждем?» Остроумец-куплетист Смирнов-Сокольский отвечает: «Государя!»',
'Первой советской киноролью Вертинского стал кардинал Бирнч в фильме Михаила Калатозова «Заговор '
'обреченных». Актер сыграл изысканного, сладкоречивого патриция со следами былого донжуанства. Так и '
'должен выглядеть настоящий враг советского режима — образованный, воспитанный, обвораживающий своим '
'лоском. Только такие и могут строить заговоры и вынашивать планы государственного переворота. Сталинская '
'премия за роль кардинала свидетельствовала о высочайшем одобрении этой трактовки.',
'Такого же двуликого Януса Вертинский исполнил в помпезном фильме Сергея Юткевича «Великий воин '
'Скандербег». Возможно, он играл бы маскирующихся иродов и дальше, если бы Исидор Анненский не предложил '
'ему роль князя в экранизации чеховской «Анны на шее». Одним своим появлением на экране Вертинский, этот '
'обломок царской России, воскрешал шик дворянских собраний и балов при дворе.',
'Положение «советского артиста» Вертинского было довольно странным. С одной стороны, явное благоволение '
'властей: его с семьей поселили в «Метрополе», затем выделили квартиру, наградили высшей государственной '
'премией. Правда, семья в течение трех лет обитала в «Метрополе» не от хорошей жизни. Съехать было просто '
'некуда, потому что выделенная квартира находилась на первом этаже двухэтажного дома на Хорошевском шоссе. '
'Артист опасался поселяться в ней и с помощью сложных маневров обменял ее на квартиру на улице Горького, '
'которая была в таком жутком состоянии, что нуждалась в капитальном ремонте. Опасения Вертинского, как '
'выяснилось позже, были не напрасны — квартира на Хорошевском шоссе подверглась налету знаменитой «Черной '
'кошки».',
'С другой стороны, из ста с лишним песен к исполнению было разрешено не более тридцати (авторство текстов '
'Георгия Иванова и Николая Гумилева Вертинскому пришлось приписать себе), единственная прижизненная '
'пластинка вышла в 1944 году, о концертах — ни строчки в прессе. «Я существую на правах публичного дома, —'
' горько шутил Вертинский, — все ходят, но в обществе говорить об этом не принято».',
'Из эмиграции Вертинский вернулся практически с пустыми карманами, вскоре родилась вторая дочь, Настя. '
'Гастрольбюро обеспечило артисту по 20–25 концертов в месяц по всей стране от Средней Азии до Дальнего '
'Востока — в нетопленных, неприспособленных для выступлений залах с расстроенными роялями и пьяной '
'публикой. Но концертная жизнь в европейских кабаках приучила его работать в любых условиях.',
'Платили Вертинскому по самому низкому тарифу, поскольку у него не было никаких званий. За концерт артист '
'получал около 800 рублей, при этом его выступления всегда проходили при аншлагах и собирали десятки тысяч '
'рублей. Приходилось соглашаться на все, давать левые концерты, выкручиваться, объясняться… Вместе с '
'аккомпаниатором Михаилом Брохесом он вдоль и поперек исколесил всю страну по нескольку раз, дав около '
'трех тысяч концертов. Написал два десятка стихов, работал над мемуарами, которые не успел закончить. 14 '
'лет на Родине превратили бодрого, моложавого мужчину в глубокого старика.',
'Он не хотел умереть дома, не желал, чтобы родные видели «кухню смерти». 21 мая 1957 года Вертинский '
'готовился к концерту в Ленинграде, был сдержан и немногословен. Он находился в своем 208-м номере '
'«Астории», когда начался сердечный приступ. Лекарства под рукой не оказалось. Как выяснилось позже — оно '
'бы уже не помогло. При вскрытии сосуды рассыпались, как хрупкое стекло',
'Назначен куратор строительства российской Кремниевой долины',
'Дмитрий Медведев доверил пост руководителя иннограда миллиардеру Виктору Вексельбергу.',
'Всё меньше остаётся нерешённых вопросов, касающихся возведения в России уникального Центра по разработке '
'и коммерциализации новых технологий. Власти уже не только выбрали площадку для строительства '
'отечественной Кремниевой долины в подмосковном Сколково, а также частично одобрили концепцию наукограда, '
'но и определили куратора большой инновационной стройки. «Были проведены определённые консультации по '
'поводу того, кто конкретно мог бы осуществлять такого рода работу. Мною принято решение, что российскую '
'часть этой координирующей структуры, которую мы создадим, возглавит Виктор Феликсович Вексельберг», — '
'цитирует «Взгляд» Дмитрия Медведева.',
'Исходя из заявления президента, понятно, что у проекта будут не только российские инвесторы, но и '
'иностранные партнёры, в числе которых, по словам главы государства, будут и представители иностранных '
'научных кругов. Именно на базе взаимодействия науки и бизнеса должен появиться и работать инноград. «Всё '
'это затеяли не ради того, чтобы построить определённое количество коттеджей или же создать там нормальные '
'производственные условия, лаборатории. Это всё важно, но это всё инфраструктура. Самое главное, чтобы '
'там появились люди. Для того чтобы люди появились, должна быть внятная система управления. Эта система '
'управления зависит от нас. Я думаю, что с учётом масштабности этого проекта, а с другой стороны, того, '
'что в реализации этого проекта должны быть заинтересованы не только государственные структуры, но, '
'прежде всего, российский бизнес, я считаю, что координацией российский бизнес и мог бы заняться», — '
'заявил Дмитрий Медведев.',
'Это выступление президента вполне объясняет выбор руководителя проекта. Виктор Вексельберг — бизнесмен с '
'30-летним стажем, капитал которого оценивается в 6,4 млрд долларов. Вексельберг является главой правления '
'ОАО «Тюменская нефтяная компания» (ТНК) и президентом ЗАО «Ренова». Именно он является владельцем '
'значительной части российского титана и алюминиевого бизнеса.',
'О том, почему площадкой для строительства Кремниевой долины выбрано Подмосковье, читайте в статье '
'Частного корреспондента «Сколково назначили Кремниевой долиной»'
]
y_true = [
{
"PERSON": [(0, 20)]
},
{
"PERSON": [(71, 76)]
},
{
"PERSON": [(36, 58)]
},
{
"LOCATION": [(55, 63), (66, 84), (87, 93)],
"PERSON": [(281, 289)]
},
{
"PERSON": [(45, 56)]
},
{
"LOCATION": [(334, 344)],
"PERSON": [(26, 40), (299, 314)]
},
{
"PERSON": [(0, 14)]
},
{
"PERSON": [(28, 39), (70, 74), (146, 151), (362, 366)]
},
{
"PERSON": [(207, 211)]
},
{
"PERSON": [(30, 34), (51, 55), (182, 186), (208, 217), (250, 266), (367, 378), (392, 396), (436, 447),
(471, 489)],
"LOCATION": [(44, 49), (71, 76), (498, 508)],
"ORG": [(269, 295)]
},
{
"PERSON": [(107, 110), (121, 132)]
},
{
"PERSON": [(0, 20), (104, 123), (126, 144), (146, 155), (184, 188), (191, 195), (197, 215), (251, 267),
(338, 357)]
},
{
"PERSON": [(36, 58)]
},
{
"PERSON": [(33, 44)],
"LOCATION": [(168, 185), (189, 197), (198, 204)],
"ORG": [(230, 249)]
},
{
"PERSON": [(9, 16), (89, 97), (108, 128), (131, 148), (430, 455)],
"LOCATION": [(27, 39), (191, 198), (414, 422)],
"ORG": [(2, 16)]
},
dict(),
{
"PERSON": [(87, 98)],
"ORG": [(18, 56)]
},
{
"LOCATION": [(72, 80)],
"PERSON": [(51, 61)]
},
{
"LOCATION": [(151, 157)],
"PERSON": [(130, 140)]
},
{
"PERSON": [(80, 97), (233, 246)],
"ORG": [(65, 97), (177, 182)]
},
{
"PERSON": [(373, 383)]
},
{
"PERSON": [(0, 10), (46, 54), (103, 109), (166, 185), (199, 209)]
},
{
"LOCATION": [(135, 141)],
"PERSON": [(36, 47), (79, 84), (177, 187)]
},
{
"PERSON": [(12, 22), (49, 60), (94, 99)],
"ORG": [(34, 60)]
},
{
"PERSON": [(105, 109)]
},
{
"LOCATION": [(389, 408)],
"PERSON": [(162, 178), (202, 212), (251, 256), (257, 268)]
},
{
"PERSON": [(171, 187), (226, 237)],
"ORG": [(134, 169)]
},
{
"PERSON": [(160, 171), (215, 236), (257, 262)]
},
{
"PERSON": [(68, 78), (267, 277)]
},
dict(),
{
"PERSON": [(123, 134)],
"ORG": [(146, 148)]
},
{
"PERSON": [(30, 41), (197, 207)],
"ORG": [(52, 54)]
},
{
"LOCATION": [(107, 113)],
"PERSON": [(39, 59), (78, 86), (88, 98)]
},
{
"LOCATION": [(23, 38), (203, 210), (212, 218), (220, 228), (230, 237), (239, 246), (248, 257),
(259, 265), (267, 272), (274, 281), (283, 286)],
"PERSON": [(128, 148), (403, 420), (422, 441), (450, 459)]
},
{
"PERSON": [(226, 237)],
"LOCATION": [(2, 12)],
"ORG": [(256, 260), (357, 361)]
},
{
"LOCATION": [(118, 125), (307, 313)],
"PERSON": [(34, 44), (263, 268), (332, 340)]
},
{
"PERSON": [(20, 30), (96, 117)],
"LOCATION": [(155, 162)],
"ORG": [(67, 88), (133, 152)]
},
{
"LOCATION": [(31, 36), (41, 47), (50, 57), (99, 105), (335, 341), (381, 385), (447, 452)],
"PERSON": [(153, 161), (279, 289), (426, 436)]
},
{
"PERSON": [(13, 20), (29, 40), (103, 108), (194, 201), (233, 244), (388, 407), (410, 427), (438, 445),
(446, 456)]
},
{
"LOCATION": [(338, 342), (392, 397)],
"PERSON": [(0, 10), (142, 147), (149, 163), (248, 260), (262, 276), (304, 315)]
},
{
"LOCATION": [(45, 51), (52, 56), (77, 83), (258, 262), (320, 325)],
"PERSON": [(58, 68), (120, 125), (174, 182), (425, 435), (630, 640)]
},
{
"LOCATION": [(42, 46), (221, 227), (251, 255)],
"PERSON": [(74, 85)]
},
{
"LOCATION": [(92, 96), (194, 200), (262, 268)],
"PERSON": [(70, 80), (345, 356), (379, 408), (529, 536), (543, 551), (564, 581), (583, 603), (652, 670)]
},
{
"PERSON": [(27, 38), (53, 58), (68, 86)]
},
{
"LOCATION": [(319, 325)],
"PERSON": [(20, 25), (26, 36), (65, 80), (95, 105), (169, 185), (239, 243), (286, 296)]
},
{
"PERSON": [(31, 42), (607, 618)],
"LOCATION": [(137, 146), (260, 269), (399, 416), (506, 520), (673, 690)],
"ORG": [(722, 734)]
},
{
"PERSON": [(105, 120), (123, 139), (140, 151), (323, 333)]
},
{
"PERSON": [(13, 23), (95, 100)],
"LOCATION": [(179, 191), (195, 211)],
"ORG": [(102, 114)]
},
{
"PERSON": [(8, 19), (327, 344)]
},
{
"LOCATION": [(123, 133), (199, 206)],
"PERSON": [(89, 99)]
},
{
"LOCATION": [(31, 59)]
},
{
"LOCATION": [(43, 52)],
"PERSON": [(0, 16), (65, 85)]
},
{
"ORG": [(84, 140), (620, 626)],
"LOCATION": [(65, 71), (212, 229), (232, 253), (291, 301)],
"PERSON": [(576, 605), (628, 645)]
},
{
"LOCATION": [(153, 164), (290, 298)],
"PERSON": [(925, 941)]
},
{
"ORG": [(201, 243), (246, 249), (265, 276)],
"PERSON": [(72, 90), (173, 184)]
},
{
"LOCATION": [(42, 59), (68, 79), (123, 131), (142, 160)],
"ORG": [(98, 121)]
}
]
X_loaded, y_loaded = load_dataset(file_name)
self.assertIsInstance(X_loaded, list)
self.assertIsInstance(y_loaded, list)
self.assertEqual(len(X_true), len(X_loaded))
self.assertEqual(len(y_true), len(y_loaded))
for sample_idx in range(len(X_true)):
self.assertEqual(X_true[sample_idx], X_loaded[sample_idx])
self.assertIsInstance(y_loaded[sample_idx], dict)
self.assertEqual(set(y_true[sample_idx]), set(y_loaded[sample_idx]))
for ne_type in y_true[sample_idx]:
self.assertIsInstance(y_loaded[sample_idx][ne_type], list)
self.assertEqual(len(y_true[sample_idx][ne_type]), len(y_loaded[sample_idx][ne_type]),
msg='Sample {0}'.format(sample_idx))
for entity_idx in range(len(y_true[sample_idx][ne_type])):
self.assertEqual(y_true[sample_idx][ne_type][entity_idx], y_loaded[sample_idx][ne_type][entity_idx])
def test_calculate_prediction_quality(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
X_true, y_true = load_dataset(os.path.join(base_dir, 'true_named_entities.json'))
X_pred, y_pred = load_dataset(os.path.join(base_dir, 'predicted_named_entities.json'))
self.assertEqual(X_true, X_pred)
f1, precision, recall = BERT_NER.calculate_prediction_quality(y_true, y_pred, ('LOCATION', 'PERSON', 'ORG'))
self.assertIsInstance(f1, float)
self.assertIsInstance(precision, float)
self.assertIsInstance(recall, float)
self.assertAlmostEqual(f1, 0.842037, places=3)
self.assertAlmostEqual(precision, 0.908352, places=3)
self.assertAlmostEqual(recall, 0.784746, places=3)
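        # A minimal sketch of how these expected numbers relate, assuming
        # calculate_prediction_quality counts a predicted span as correct only
        # on an exact (start, end) match with a true span of the same entity
        # type (an assumption about the implementation, not a documented spec):
        #
        #     precision = n_exact_matches / n_predicted_spans
        #     recall    = n_exact_matches / n_true_spans
        #     f1        = 2 * precision * recall / (precision + recall)
        #
        # With precision ~= 0.908352 and recall ~= 0.784746, the harmonic mean
        # is ~= 0.842037, which is exactly the F1 value asserted above.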
def test_fit_positive01(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=None, lstm_units=32)
X_train, y_train = load_dataset(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertIsInstance(res.batch_size, int)
self.assertIsInstance(res.lstm_units, int)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsInstance(res.clip_norm, float)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'logits_'))
self.assertTrue(hasattr(res, 'transition_params_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'input_ids_'))
self.assertTrue(hasattr(res, 'input_mask_'))
self.assertTrue(hasattr(res, 'segment_ids_'))
self.assertTrue(hasattr(res, 'y_ph_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
def test_fit_positive02(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=True, max_epochs=3, batch_size=2, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=42, lstm_units=32)
X_train, y_train = load_dataset(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertIsInstance(res.batch_size, int)
self.assertIsInstance(res.lstm_units, int)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsInstance(res.clip_norm, float)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertEqual(res.random_seed, 42)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'logits_'))
self.assertTrue(hasattr(res, 'transition_params_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'input_ids_'))
self.assertTrue(hasattr(res, 'input_mask_'))
self.assertTrue(hasattr(res, 'segment_ids_'))
self.assertTrue(hasattr(res, 'y_ph_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
def test_fit_positive03(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=None, lstm_units=None, clip_norm=None)
X_train, y_train = load_dataset(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertIsInstance(res.batch_size, int)
self.assertIsNone(res.lstm_units)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
        self.assertIsNone(res.clip_norm)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'logits_'))
self.assertTrue(hasattr(res, 'transition_params_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'input_ids_'))
self.assertTrue(hasattr(res, 'input_mask_'))
self.assertTrue(hasattr(res, 'segment_ids_'))
self.assertTrue(hasattr(res, 'y_ph_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
def test_fit_predict(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=None)
X_train, y_train = load_dataset(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertIsInstance(res.batch_size, int)
self.assertIsInstance(res.lstm_units, int)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsInstance(res.clip_norm, float)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'logits_'))
self.assertTrue(hasattr(res, 'transition_params_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'input_ids_'))
self.assertTrue(hasattr(res, 'input_mask_'))
self.assertTrue(hasattr(res, 'segment_ids_'))
self.assertTrue(hasattr(res, 'y_ph_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
y_pred = res.predict(X_train)
self.assertIsInstance(y_pred, list)
self.assertEqual(len(X_train), len(y_pred))
for sample_idx in range(len(y_pred)):
self.assertIsInstance(y_pred[sample_idx], dict)
f1, precision, recall = res.calculate_prediction_quality(y_train, y_pred, res.classes_list_)
self.assertGreater(f1, 0.0)
self.assertGreater(precision, 0.0)
self.assertGreater(recall, 0.0)
def test_predict_negative(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, random_seed=None)
X_train, y_train = load_dataset(os.path.join(base_dir, 'true_named_entities.json'))
with self.assertRaises(NotFittedError):
_ = self.ner.predict(X_train)
def test_serialize_positive01(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=None)
X_train, y_train = load_dataset(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertIsInstance(res.batch_size, int)
self.assertIsInstance(res.lstm_units, int)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsInstance(res.clip_norm, float)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'logits_'))
self.assertTrue(hasattr(res, 'transition_params_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'input_ids_'))
self.assertTrue(hasattr(res, 'input_mask_'))
self.assertTrue(hasattr(res, 'segment_ids_'))
self.assertTrue(hasattr(res, 'y_ph_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
y_pred1 = res.predict(X_train)
self.assertIsInstance(y_pred1, list)
self.assertEqual(len(X_train), len(y_pred1))
for sample_idx in range(len(y_pred1)):
self.assertIsInstance(y_pred1[sample_idx], dict)
f1, precision, recall = res.calculate_prediction_quality(y_train, y_pred1, res.classes_list_)
self.assertGreater(f1, 0.0)
self.assertGreater(precision, 0.0)
self.assertGreater(recall, 0.0)
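        # NamedTemporaryFile is used below only to obtain a unique path: the
        # handle is dropped immediately (so the file itself is removed), and
        # the name is reused for the pickle round-trip. This is race-prone in
        # general but acceptable in a single-process test.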
self.temp_file_name = tempfile.NamedTemporaryFile(mode='w').name
with open(self.temp_file_name, mode='wb') as fp:
pickle.dump(res, fp)
del res, self.ner
gc.collect()
with open(self.temp_file_name, mode='rb') as fp:
self.ner = pickle.load(fp)
y_pred2 = self.ner.predict(X_train)
self.assertIsInstance(y_pred2, list)
        self.assertEqual(len(y_pred1), len(y_pred2))
for sample_idx in range(len(y_pred2)):
self.assertIsInstance(y_pred2[sample_idx], dict)
self.assertEqual(set(y_pred1[sample_idx]), set(y_pred2[sample_idx]))
for ne_type in y_pred1[sample_idx]:
self.assertEqual(y_pred1[sample_idx][ne_type], y_pred2[sample_idx][ne_type])
def test_serialize_positive02(self):
self.ner = BERT_NER(random_seed=31)
old_batch_size = self.ner.batch_size
old_lstm_units = self.ner.lstm_units
old_lr = self.ner.lr
old_l2_reg = self.ner.l2_reg
old_clip_norm = self.ner.clip_norm
old_bert_hub_module_handle = self.ner.bert_hub_module_handle
old_finetune_bert = self.ner.finetune_bert
old_max_epochs = self.ner.max_epochs
old_patience = self.ner.patience
old_random_seed = self.ner.random_seed
old_gpu_memory_frac = self.ner.gpu_memory_frac
old_max_seq_length = self.ner.max_seq_length
old_validation_fraction = self.ner.validation_fraction
old_verbose = self.ner.verbose
self.temp_file_name = tempfile.NamedTemporaryFile().name
with open(self.temp_file_name, mode='wb') as fp:
pickle.dump(self.ner, fp)
del self.ner
gc.collect()
with open(self.temp_file_name, mode='rb') as fp:
self.ner = pickle.load(fp)
self.assertIsInstance(self.ner, BERT_NER)
self.assertTrue(hasattr(self.ner, 'batch_size'))
self.assertTrue(hasattr(self.ner, 'lstm_units'))
self.assertTrue(hasattr(self.ner, 'lr'))
self.assertTrue(hasattr(self.ner, 'l2_reg'))
self.assertTrue(hasattr(self.ner, 'clip_norm'))
self.assertTrue(hasattr(self.ner, 'bert_hub_module_handle'))
self.assertTrue(hasattr(self.ner, 'finetune_bert'))
self.assertTrue(hasattr(self.ner, 'max_epochs'))
self.assertTrue(hasattr(self.ner, 'patience'))
self.assertTrue(hasattr(self.ner, 'random_seed'))
self.assertTrue(hasattr(self.ner, 'gpu_memory_frac'))
self.assertTrue(hasattr(self.ner, 'max_seq_length'))
self.assertTrue(hasattr(self.ner, 'validation_fraction'))
self.assertTrue(hasattr(self.ner, 'verbose'))
self.assertEqual(self.ner.batch_size, old_batch_size)
self.assertEqual(self.ner.lstm_units, old_lstm_units)
self.assertAlmostEqual(self.ner.lr, old_lr)
self.assertAlmostEqual(self.ner.l2_reg, old_l2_reg)
self.assertAlmostEqual(self.ner.clip_norm, old_clip_norm)
self.assertEqual(self.ner.bert_hub_module_handle, old_bert_hub_module_handle)
self.assertEqual(self.ner.finetune_bert, old_finetune_bert)
self.assertEqual(self.ner.max_epochs, old_max_epochs)
self.assertEqual(self.ner.patience, old_patience)
self.assertAlmostEqual(self.ner.gpu_memory_frac, old_gpu_memory_frac)
self.assertEqual(self.ner.max_seq_length, old_max_seq_length)
self.assertAlmostEqual(self.ner.validation_fraction, old_validation_fraction)
self.assertEqual(self.ner.verbose, old_verbose)
self.assertEqual(self.ner.random_seed, old_random_seed)
def test_copy_positive01(self):
self.ner = BERT_NER(random_seed=0)
self.another_ner = copy.copy(self.ner)
self.assertIsInstance(self.another_ner, BERT_NER)
self.assertIsNot(self.ner, self.another_ner)
self.assertTrue(hasattr(self.another_ner, 'batch_size'))
self.assertTrue(hasattr(self.another_ner, 'lstm_units'))
self.assertTrue(hasattr(self.another_ner, 'lr'))
self.assertTrue(hasattr(self.another_ner, 'l2_reg'))
self.assertTrue(hasattr(self.another_ner, 'clip_norm'))
self.assertTrue(hasattr(self.another_ner, 'bert_hub_module_handle'))
self.assertTrue(hasattr(self.another_ner, 'finetune_bert'))
self.assertTrue(hasattr(self.another_ner, 'max_epochs'))
self.assertTrue(hasattr(self.another_ner, 'patience'))
self.assertTrue(hasattr(self.another_ner, 'random_seed'))
self.assertTrue(hasattr(self.another_ner, 'gpu_memory_frac'))
self.assertTrue(hasattr(self.another_ner, 'max_seq_length'))
self.assertTrue(hasattr(self.another_ner, 'validation_fraction'))
self.assertTrue(hasattr(self.another_ner, 'verbose'))
self.assertEqual(self.ner.batch_size, self.another_ner.batch_size)
self.assertEqual(self.ner.lstm_units, self.another_ner.lstm_units)
self.assertAlmostEqual(self.ner.lr, self.another_ner.lr)
self.assertAlmostEqual(self.ner.l2_reg, self.another_ner.l2_reg)
self.assertAlmostEqual(self.ner.clip_norm, self.another_ner.clip_norm)
self.assertEqual(self.ner.bert_hub_module_handle, self.another_ner.bert_hub_module_handle)
self.assertEqual(self.ner.finetune_bert, self.another_ner.finetune_bert)
self.assertEqual(self.ner.max_epochs, self.another_ner.max_epochs)
self.assertEqual(self.ner.patience, self.another_ner.patience)
self.assertEqual(self.ner.random_seed, self.another_ner.random_seed)
self.assertAlmostEqual(self.ner.gpu_memory_frac, self.another_ner.gpu_memory_frac)
self.assertEqual(self.ner.max_seq_length, self.another_ner.max_seq_length)
self.assertAlmostEqual(self.ner.validation_fraction, self.another_ner.validation_fraction)
self.assertEqual(self.ner.verbose, self.another_ner.verbose)
def test_copy_positive02(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=None)
X_train, y_train = load_dataset(os.path.join(base_dir, 'true_named_entities.json'))
self.ner.fit(X_train, y_train)
self.another_ner = copy.copy(self.ner)
self.assertIsInstance(self.another_ner, BERT_NER)
self.assertIsNot(self.ner, self.another_ner)
self.assertTrue(hasattr(self.another_ner, 'batch_size'))
self.assertTrue(hasattr(self.another_ner, 'lstm_units'))
self.assertTrue(hasattr(self.another_ner, 'lr'))
self.assertTrue(hasattr(self.another_ner, 'l2_reg'))
self.assertTrue(hasattr(self.another_ner, 'clip_norm'))
self.assertTrue(hasattr(self.another_ner, 'bert_hub_module_handle'))
self.assertTrue(hasattr(self.another_ner, 'finetune_bert'))
self.assertTrue(hasattr(self.another_ner, 'max_epochs'))
self.assertTrue(hasattr(self.another_ner, 'patience'))
self.assertTrue(hasattr(self.another_ner, 'random_seed'))
self.assertTrue(hasattr(self.another_ner, 'gpu_memory_frac'))
self.assertTrue(hasattr(self.another_ner, 'max_seq_length'))
self.assertTrue(hasattr(self.another_ner, 'validation_fraction'))
self.assertTrue(hasattr(self.another_ner, 'verbose'))
self.assertTrue(hasattr(self.another_ner, 'classes_list_'))
self.assertTrue(hasattr(self.another_ner, 'logits_'))
self.assertTrue(hasattr(self.another_ner, 'transition_params_'))
self.assertTrue(hasattr(self.another_ner, 'tokenizer_'))
self.assertTrue(hasattr(self.another_ner, 'input_ids_'))
self.assertTrue(hasattr(self.another_ner, 'input_mask_'))
self.assertTrue(hasattr(self.another_ner, 'segment_ids_'))
self.assertTrue(hasattr(self.another_ner, 'y_ph_'))
self.assertTrue(hasattr(self.another_ner, 'sess_'))
self.assertEqual(self.ner.batch_size, self.another_ner.batch_size)
self.assertEqual(self.ner.lstm_units, self.another_ner.lstm_units)
self.assertAlmostEqual(self.ner.lr, self.another_ner.lr)
self.assertAlmostEqual(self.ner.l2_reg, self.another_ner.l2_reg)
self.assertAlmostEqual(self.ner.clip_norm, self.another_ner.clip_norm)
self.assertEqual(self.ner.bert_hub_module_handle, self.another_ner.bert_hub_module_handle)
self.assertEqual(self.ner.finetune_bert, self.another_ner.finetune_bert)
self.assertEqual(self.ner.max_epochs, self.another_ner.max_epochs)
self.assertEqual(self.ner.patience, self.another_ner.patience)
self.assertEqual(self.ner.random_seed, self.another_ner.random_seed)
self.assertAlmostEqual(self.ner.gpu_memory_frac, self.another_ner.gpu_memory_frac)
self.assertEqual(self.ner.max_seq_length, self.another_ner.max_seq_length)
self.assertAlmostEqual(self.ner.validation_fraction, self.another_ner.validation_fraction)
self.assertEqual(self.ner.verbose, self.another_ner.verbose)
self.assertIs(self.ner.classes_list_, self.another_ner.classes_list_)
self.assertIs(self.ner.logits_, self.another_ner.logits_)
self.assertIs(self.ner.transition_params_, self.another_ner.transition_params_)
self.assertIs(self.ner.tokenizer_, self.another_ner.tokenizer_)
self.assertIs(self.ner.input_ids_, self.another_ner.input_ids_)
self.assertIs(self.ner.input_mask_, self.another_ner.input_mask_)
self.assertIs(self.ner.segment_ids_, self.another_ner.segment_ids_)
self.assertIs(self.ner.y_ph_, self.another_ner.y_ph_)
self.assertIs(self.ner.sess_, self.another_ner.sess_)
def test_calculate_bounds_of_named_entities(self):
bounds_of_tokens = [(0, 2), (2, 5), (5, 8), (8, 10), (11, 16), (17, 20), (20, 22), (22, 26), (26, 27), (28, 31),
(31, 34), (34, 37), (38, 48), (49, 52), (52, 54), (55, 57), (58, 59), (59, 61), (61, 63),
(64, 70), (71, 83), (84, 87), (87, 90), (90, 93), (93, 95), (95, 98), (98, 99)]
classes_list = ('LOCATION', 'ORG', 'PERSON')
labels_of_tokens = [0, 0, 2, 1, 1, 2, 1, 0, 0, 0, 4, 3, 0, 6, 5, 5, 5, 0, 5, 5, 0, 2, 2, 3, 3, 6, 5]
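        # The integer labels appear to follow an IOB2-style scheme over
        # classes_list: 0 is "O" (outside), and for the class with 0-based
        # index c, label 2*c + 1 marks "I-<class>" and 2*c + 2 marks
        # "B-<class>", so the class of any non-zero label is (label - 1) // 2.
        # E.g. 2 = B-LOCATION, 1 = I-LOCATION, 4 = B-ORG, 6 = B-PERSON. This
        # is inferred from the fixture below, not from a documented spec.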
true_entities = {
'LOCATION': [(5, 16), (17, 22), (84, 87), (87, 90)],
'ORG': [(31, 37), (90, 95)],
'PERSON': [(49, 59), (61, 70), (95, 99)]
}
calc_entities = BERT_NER.calculate_bounds_of_named_entities(bounds_of_tokens, classes_list, labels_of_tokens)
self.assertIsInstance(calc_entities, dict)
self.assertEqual(set(true_entities.keys()), set(calc_entities.keys()))
for entity_type in true_entities:
self.assertEqual(true_entities[entity_type], calc_entities[entity_type])
if __name__ == '__main__':
unittest.main(verbosity=2)
| 61.430567 | 120 | 0.616681 |
957bb06f334ac72b2722d2c1963ba9c688792a6e | 113,053 | gyp | Python | grpc.gyp | splittingred/grpc | b4a5727149201bac53a33c53c2cf93fed5414540 | ["Apache-2.0"] | 1 | 2019-06-17T18:45:53.000Z | 2019-06-17T18:45:53.000Z | grpc.gyp | splittingred/grpc | b4a5727149201bac53a33c53c2cf93fed5414540 | ["Apache-2.0"] | null | null | null | grpc.gyp | splittingred/grpc | b4a5727149201bac53a33c53c2cf93fed5414540 | ["Apache-2.0"] | null | null | null |
# GRPC GYP build file
# This file has been automatically generated from a template file.
# Please look at the templates directory instead.
# This file can be regenerated from the template by running
# tools/buildgen/generate_projects.sh
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
{
'variables': {
# The openssl and zlib dependencies must be passed in as variables
# defined in an included gypi file, usually common.gypi.
'openssl_gyp_target%': 'Please Define openssl_gyp_target variable',
'zlib_gyp_target%': 'Please Define zlib_gyp_target variable',
'grpc_gcov%': 'false',
'grpc_alpine%': 'false',
},
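  # A hypothetical common.gypi supplying the two variables above might look
  # like this (the target paths and names here are illustrative only):
  #
  #   {
  #     'variables': {
  #       'openssl_gyp_target': 'third_party/openssl/openssl.gyp:openssl',
  #       'zlib_gyp_target': 'third_party/zlib/zlib.gyp:zlib',
  #     },
  #   }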
'target_defaults': {
'configurations': {
'Release': {
'cflags': [
'-O2',
'-Wframe-larger-than=16384',
],
'defines': [
'NDEBUG',
],
},
'Debug': {
'cflags': [
'-O0',
],
'defines': [
'_DEBUG',
'DEBUG',
],
},
},
'cflags': [
'-g',
'-Wall',
'-Wextra',
'-Werror',
'-Wno-long-long',
'-Wno-unused-parameter',
'-DOSATOMIC_USE_INLINED=1',
'-Wno-deprecated-declarations',
],
'ldflags': [
'-g',
],
'cflags_c': [
'-Werror',
'-std=c99',
],
'cflags_cc': [
'-Werror',
'-std=c++11',
],
'include_dirs': [
'.',
'../..',
'include',
],
'defines': [
'GRPC_ARES=0',
],
'dependencies': [
'<(openssl_gyp_target)',
'<(zlib_gyp_target)',
],
'conditions': [
['grpc_gcov=="true"', {
'cflags': [
'-O0',
'-fprofile-arcs',
'-ftest-coverage',
'-Wno-return-type',
],
'defines': [
'_DEBUG',
'DEBUG',
'GPR_GCOV',
],
'ldflags': [
'-fprofile-arcs',
'-ftest-coverage',
'-rdynamic',
'-lstdc++',
],
}],
['grpc_alpine=="true"', {
'defines': [
'GPR_MUSL_LIBC_COMPAT'
]
}],
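      # GPR_MUSL_LIBC_COMPAT (above) selects musl-compatible code paths, since
      # Alpine's musl libc lacks some glibc-only features gpr otherwise uses.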
['OS == "win"', {
'defines': [
'_WIN32_WINNT=0x0600',
'WIN32_LEAN_AND_MEAN',
'_HAS_EXCEPTIONS=0',
'UNICODE',
'_UNICODE',
'NOMINMAX',
],
'msvs_settings': {
'VCCLCompilerTool': {
'RuntimeLibrary': 1, # static debug
}
},
"libraries": [
"ws2_32"
]
}],
['OS == "mac"', {
'xcode_settings': {
'OTHER_CFLAGS': [
'-g',
'-Wall',
'-Wextra',
'-Werror',
'-Wno-long-long',
'-Wno-unused-parameter',
'-DOSATOMIC_USE_INLINED=1',
'-Wno-deprecated-declarations',
],
'OTHER_CPLUSPLUSFLAGS': [
'-g',
'-Wall',
'-Wextra',
'-Werror',
'-Wno-long-long',
'-Wno-unused-parameter',
'-DOSATOMIC_USE_INLINED=1',
'-Wno-deprecated-declarations',
'-stdlib=libc++',
'-std=c++11',
'-Wno-error=deprecated-declarations',
],
},
}]
]
},
'targets': [
{
'target_name': 'alts_test_util',
'type': 'static_library',
'dependencies': [
'grpc',
],
'sources': [
'test/core/tsi/alts/crypt/gsec_test_util.cc',
'test/core/tsi/alts/handshaker/alts_handshaker_service_api_test_lib.cc',
],
},
{
'target_name': 'gpr',
'type': 'static_library',
'dependencies': [
],
'sources': [
'src/core/lib/gpr/alloc.cc',
'src/core/lib/gpr/arena.cc',
'src/core/lib/gpr/atm.cc',
'src/core/lib/gpr/cpu_iphone.cc',
'src/core/lib/gpr/cpu_linux.cc',
'src/core/lib/gpr/cpu_posix.cc',
'src/core/lib/gpr/cpu_windows.cc',
'src/core/lib/gpr/env_linux.cc',
'src/core/lib/gpr/env_posix.cc',
'src/core/lib/gpr/env_windows.cc',
'src/core/lib/gpr/fork.cc',
'src/core/lib/gpr/host_port.cc',
'src/core/lib/gpr/log.cc',
'src/core/lib/gpr/log_android.cc',
'src/core/lib/gpr/log_linux.cc',
'src/core/lib/gpr/log_posix.cc',
'src/core/lib/gpr/log_windows.cc',
'src/core/lib/gpr/mpscq.cc',
'src/core/lib/gpr/murmur_hash.cc',
'src/core/lib/gpr/string.cc',
'src/core/lib/gpr/string_posix.cc',
'src/core/lib/gpr/string_util_windows.cc',
'src/core/lib/gpr/string_windows.cc',
'src/core/lib/gpr/sync.cc',
'src/core/lib/gpr/sync_posix.cc',
'src/core/lib/gpr/sync_windows.cc',
'src/core/lib/gpr/time.cc',
'src/core/lib/gpr/time_posix.cc',
'src/core/lib/gpr/time_precise.cc',
'src/core/lib/gpr/time_windows.cc',
'src/core/lib/gpr/tls_pthread.cc',
'src/core/lib/gpr/tmpfile_msys.cc',
'src/core/lib/gpr/tmpfile_posix.cc',
'src/core/lib/gpr/tmpfile_windows.cc',
'src/core/lib/gpr/wrap_memcpy.cc',
'src/core/lib/gprpp/thd_posix.cc',
'src/core/lib/gprpp/thd_windows.cc',
'src/core/lib/profiling/basic_timers.cc',
'src/core/lib/profiling/stap_timers.cc',
],
},
{
'target_name': 'gpr_test_util',
'type': 'static_library',
'dependencies': [
'gpr',
],
'sources': [
'test/core/util/test_config.cc',
],
},
{
'target_name': 'grpc',
'type': 'static_library',
'dependencies': [
'gpr',
],
'sources': [
'src/core/lib/surface/init.cc',
'src/core/lib/avl/avl.cc',
'src/core/lib/backoff/backoff.cc',
'src/core/lib/channel/channel_args.cc',
'src/core/lib/channel/channel_stack.cc',
'src/core/lib/channel/channel_stack_builder.cc',
'src/core/lib/channel/channel_trace.cc',
'src/core/lib/channel/channel_trace_registry.cc',
'src/core/lib/channel/connected_channel.cc',
'src/core/lib/channel/handshaker.cc',
'src/core/lib/channel/handshaker_factory.cc',
'src/core/lib/channel/handshaker_registry.cc',
'src/core/lib/channel/status_util.cc',
'src/core/lib/compression/compression.cc',
'src/core/lib/compression/compression_internal.cc',
'src/core/lib/compression/message_compress.cc',
'src/core/lib/compression/stream_compression.cc',
'src/core/lib/compression/stream_compression_gzip.cc',
'src/core/lib/compression/stream_compression_identity.cc',
'src/core/lib/debug/stats.cc',
'src/core/lib/debug/stats_data.cc',
'src/core/lib/http/format_request.cc',
'src/core/lib/http/httpcli.cc',
'src/core/lib/http/parser.cc',
'src/core/lib/iomgr/call_combiner.cc',
'src/core/lib/iomgr/combiner.cc',
'src/core/lib/iomgr/endpoint.cc',
'src/core/lib/iomgr/endpoint_pair_posix.cc',
'src/core/lib/iomgr/endpoint_pair_uv.cc',
'src/core/lib/iomgr/endpoint_pair_windows.cc',
'src/core/lib/iomgr/error.cc',
'src/core/lib/iomgr/ev_epoll1_linux.cc',
'src/core/lib/iomgr/ev_epollex_linux.cc',
'src/core/lib/iomgr/ev_epollsig_linux.cc',
'src/core/lib/iomgr/ev_poll_posix.cc',
'src/core/lib/iomgr/ev_posix.cc',
'src/core/lib/iomgr/ev_windows.cc',
'src/core/lib/iomgr/exec_ctx.cc',
'src/core/lib/iomgr/executor.cc',
'src/core/lib/iomgr/fork_posix.cc',
'src/core/lib/iomgr/fork_windows.cc',
'src/core/lib/iomgr/gethostname_fallback.cc',
'src/core/lib/iomgr/gethostname_host_name_max.cc',
'src/core/lib/iomgr/gethostname_sysconf.cc',
'src/core/lib/iomgr/iocp_windows.cc',
'src/core/lib/iomgr/iomgr.cc',
'src/core/lib/iomgr/iomgr_custom.cc',
'src/core/lib/iomgr/iomgr_internal.cc',
'src/core/lib/iomgr/iomgr_posix.cc',
'src/core/lib/iomgr/iomgr_uv.cc',
'src/core/lib/iomgr/iomgr_windows.cc',
'src/core/lib/iomgr/is_epollexclusive_available.cc',
'src/core/lib/iomgr/load_file.cc',
'src/core/lib/iomgr/lockfree_event.cc',
'src/core/lib/iomgr/network_status_tracker.cc',
'src/core/lib/iomgr/polling_entity.cc',
'src/core/lib/iomgr/pollset.cc',
'src/core/lib/iomgr/pollset_custom.cc',
'src/core/lib/iomgr/pollset_set.cc',
'src/core/lib/iomgr/pollset_set_custom.cc',
'src/core/lib/iomgr/pollset_set_windows.cc',
'src/core/lib/iomgr/pollset_uv.cc',
'src/core/lib/iomgr/pollset_windows.cc',
'src/core/lib/iomgr/resolve_address.cc',
'src/core/lib/iomgr/resolve_address_custom.cc',
'src/core/lib/iomgr/resolve_address_posix.cc',
'src/core/lib/iomgr/resolve_address_windows.cc',
'src/core/lib/iomgr/resource_quota.cc',
'src/core/lib/iomgr/sockaddr_utils.cc',
'src/core/lib/iomgr/socket_factory_posix.cc',
'src/core/lib/iomgr/socket_mutator.cc',
'src/core/lib/iomgr/socket_utils_common_posix.cc',
'src/core/lib/iomgr/socket_utils_linux.cc',
'src/core/lib/iomgr/socket_utils_posix.cc',
'src/core/lib/iomgr/socket_utils_uv.cc',
'src/core/lib/iomgr/socket_utils_windows.cc',
'src/core/lib/iomgr/socket_windows.cc',
'src/core/lib/iomgr/tcp_client.cc',
'src/core/lib/iomgr/tcp_client_custom.cc',
'src/core/lib/iomgr/tcp_client_posix.cc',
'src/core/lib/iomgr/tcp_client_windows.cc',
'src/core/lib/iomgr/tcp_custom.cc',
'src/core/lib/iomgr/tcp_posix.cc',
'src/core/lib/iomgr/tcp_server.cc',
'src/core/lib/iomgr/tcp_server_custom.cc',
'src/core/lib/iomgr/tcp_server_posix.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_common.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc',
'src/core/lib/iomgr/tcp_server_windows.cc',
'src/core/lib/iomgr/tcp_uv.cc',
'src/core/lib/iomgr/tcp_windows.cc',
'src/core/lib/iomgr/time_averaged_stats.cc',
'src/core/lib/iomgr/timer.cc',
'src/core/lib/iomgr/timer_custom.cc',
'src/core/lib/iomgr/timer_generic.cc',
'src/core/lib/iomgr/timer_heap.cc',
'src/core/lib/iomgr/timer_manager.cc',
'src/core/lib/iomgr/timer_uv.cc',
'src/core/lib/iomgr/udp_server.cc',
'src/core/lib/iomgr/unix_sockets_posix.cc',
'src/core/lib/iomgr/unix_sockets_posix_noop.cc',
'src/core/lib/iomgr/wakeup_fd_cv.cc',
'src/core/lib/iomgr/wakeup_fd_eventfd.cc',
'src/core/lib/iomgr/wakeup_fd_nospecial.cc',
'src/core/lib/iomgr/wakeup_fd_pipe.cc',
'src/core/lib/iomgr/wakeup_fd_posix.cc',
'src/core/lib/json/json.cc',
'src/core/lib/json/json_reader.cc',
'src/core/lib/json/json_string.cc',
'src/core/lib/json/json_writer.cc',
'src/core/lib/slice/b64.cc',
'src/core/lib/slice/percent_encoding.cc',
'src/core/lib/slice/slice.cc',
'src/core/lib/slice/slice_buffer.cc',
'src/core/lib/slice/slice_intern.cc',
'src/core/lib/slice/slice_string_helpers.cc',
'src/core/lib/surface/api_trace.cc',
'src/core/lib/surface/byte_buffer.cc',
'src/core/lib/surface/byte_buffer_reader.cc',
'src/core/lib/surface/call.cc',
'src/core/lib/surface/call_details.cc',
'src/core/lib/surface/call_log_batch.cc',
'src/core/lib/surface/channel.cc',
'src/core/lib/surface/channel_init.cc',
'src/core/lib/surface/channel_ping.cc',
'src/core/lib/surface/channel_stack_type.cc',
'src/core/lib/surface/completion_queue.cc',
'src/core/lib/surface/completion_queue_factory.cc',
'src/core/lib/surface/event_string.cc',
'src/core/lib/surface/lame_client.cc',
'src/core/lib/surface/metadata_array.cc',
'src/core/lib/surface/server.cc',
'src/core/lib/surface/validate_metadata.cc',
'src/core/lib/surface/version.cc',
'src/core/lib/transport/bdp_estimator.cc',
'src/core/lib/transport/byte_stream.cc',
'src/core/lib/transport/connectivity_state.cc',
'src/core/lib/transport/error_utils.cc',
'src/core/lib/transport/metadata.cc',
'src/core/lib/transport/metadata_batch.cc',
'src/core/lib/transport/pid_controller.cc',
'src/core/lib/transport/service_config.cc',
'src/core/lib/transport/static_metadata.cc',
'src/core/lib/transport/status_conversion.cc',
'src/core/lib/transport/status_metadata.cc',
'src/core/lib/transport/timeout_encoding.cc',
'src/core/lib/transport/transport.cc',
'src/core/lib/transport/transport_op_string.cc',
'src/core/lib/debug/trace.cc',
'src/core/ext/transport/chttp2/server/secure/server_secure_chttp2.cc',
'src/core/ext/transport/chttp2/transport/bin_decoder.cc',
'src/core/ext/transport/chttp2/transport/bin_encoder.cc',
'src/core/ext/transport/chttp2/transport/chttp2_plugin.cc',
'src/core/ext/transport/chttp2/transport/chttp2_transport.cc',
'src/core/ext/transport/chttp2/transport/flow_control.cc',
'src/core/ext/transport/chttp2/transport/frame_data.cc',
'src/core/ext/transport/chttp2/transport/frame_goaway.cc',
'src/core/ext/transport/chttp2/transport/frame_ping.cc',
'src/core/ext/transport/chttp2/transport/frame_rst_stream.cc',
'src/core/ext/transport/chttp2/transport/frame_settings.cc',
'src/core/ext/transport/chttp2/transport/frame_window_update.cc',
'src/core/ext/transport/chttp2/transport/hpack_encoder.cc',
'src/core/ext/transport/chttp2/transport/hpack_parser.cc',
'src/core/ext/transport/chttp2/transport/hpack_table.cc',
'src/core/ext/transport/chttp2/transport/http2_settings.cc',
'src/core/ext/transport/chttp2/transport/huffsyms.cc',
'src/core/ext/transport/chttp2/transport/incoming_metadata.cc',
'src/core/ext/transport/chttp2/transport/parsing.cc',
'src/core/ext/transport/chttp2/transport/stream_lists.cc',
'src/core/ext/transport/chttp2/transport/stream_map.cc',
'src/core/ext/transport/chttp2/transport/varint.cc',
'src/core/ext/transport/chttp2/transport/writing.cc',
'src/core/ext/transport/chttp2/alpn/alpn.cc',
'src/core/ext/filters/http/client/http_client_filter.cc',
'src/core/ext/filters/http/http_filters_plugin.cc',
'src/core/ext/filters/http/message_compress/message_compress_filter.cc',
'src/core/ext/filters/http/server/http_server_filter.cc',
'src/core/lib/http/httpcli_security_connector.cc',
'src/core/lib/security/context/security_context.cc',
'src/core/lib/security/credentials/alts/alts_credentials.cc',
'src/core/lib/security/credentials/composite/composite_credentials.cc',
'src/core/lib/security/credentials/credentials.cc',
'src/core/lib/security/credentials/credentials_metadata.cc',
'src/core/lib/security/credentials/fake/fake_credentials.cc',
'src/core/lib/security/credentials/google_default/credentials_generic.cc',
'src/core/lib/security/credentials/google_default/google_default_credentials.cc',
'src/core/lib/security/credentials/iam/iam_credentials.cc',
'src/core/lib/security/credentials/jwt/json_token.cc',
'src/core/lib/security/credentials/jwt/jwt_credentials.cc',
'src/core/lib/security/credentials/jwt/jwt_verifier.cc',
'src/core/lib/security/credentials/oauth2/oauth2_credentials.cc',
'src/core/lib/security/credentials/plugin/plugin_credentials.cc',
'src/core/lib/security/credentials/ssl/ssl_credentials.cc',
'src/core/lib/security/security_connector/alts_security_connector.cc',
'src/core/lib/security/security_connector/security_connector.cc',
'src/core/lib/security/transport/client_auth_filter.cc',
'src/core/lib/security/transport/secure_endpoint.cc',
'src/core/lib/security/transport/security_handshaker.cc',
'src/core/lib/security/transport/server_auth_filter.cc',
'src/core/lib/security/transport/target_authority_table.cc',
'src/core/lib/security/transport/tsi_error.cc',
'src/core/lib/security/util/json_util.cc',
'src/core/lib/surface/init_secure.cc',
'src/core/tsi/alts/crypt/aes_gcm.cc',
'src/core/tsi/alts/crypt/gsec.cc',
'src/core/tsi/alts/frame_protector/alts_counter.cc',
'src/core/tsi/alts/frame_protector/alts_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_frame_protector.cc',
'src/core/tsi/alts/frame_protector/alts_record_protocol_crypter_common.cc',
'src/core/tsi/alts/frame_protector/alts_seal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/alts_unseal_privacy_integrity_crypter.cc',
'src/core/tsi/alts/frame_protector/frame_handler.cc',
'src/core/tsi/alts/handshaker/alts_handshaker_client.cc',
'src/core/tsi/alts/handshaker/alts_tsi_event.cc',
'src/core/tsi/alts/handshaker/alts_tsi_handshaker.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_integrity_only_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_privacy_integrity_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_grpc_record_protocol_common.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_iovec_record_protocol.cc',
'src/core/tsi/alts/zero_copy_frame_protector/alts_zero_copy_grpc_protector.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_linux.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_no_op.cc',
'src/core/lib/security/credentials/alts/check_gcp_environment_windows.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_client_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_options.cc',
'src/core/lib/security/credentials/alts/grpc_alts_credentials_server_options.cc',
'src/core/tsi/alts/handshaker/alts_handshaker_service_api.cc',
'src/core/tsi/alts/handshaker/alts_handshaker_service_api_util.cc',
'src/core/tsi/alts/handshaker/alts_tsi_utils.cc',
'src/core/tsi/alts/handshaker/transport_security_common_api.cc',
'src/core/tsi/alts/handshaker/altscontext.pb.c',
'src/core/tsi/alts/handshaker/handshaker.pb.c',
'src/core/tsi/alts/handshaker/transport_security_common.pb.c',
'third_party/nanopb/pb_common.c',
'third_party/nanopb/pb_decode.c',
'third_party/nanopb/pb_encode.c',
'src/core/tsi/transport_security.cc',
'src/core/tsi/transport_security_adapter.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create_posix.cc',
'src/core/ext/transport/chttp2/client/chttp2_connector.cc',
'src/core/ext/filters/client_channel/backup_poller.cc',
'src/core/ext/filters/client_channel/channel_connectivity.cc',
'src/core/ext/filters/client_channel/client_channel.cc',
'src/core/ext/filters/client_channel/client_channel_factory.cc',
'src/core/ext/filters/client_channel/client_channel_plugin.cc',
'src/core/ext/filters/client_channel/connector.cc',
'src/core/ext/filters/client_channel/http_connect_handshaker.cc',
'src/core/ext/filters/client_channel/http_proxy.cc',
'src/core/ext/filters/client_channel/lb_policy.cc',
'src/core/ext/filters/client_channel/lb_policy_factory.cc',
'src/core/ext/filters/client_channel/lb_policy_registry.cc',
'src/core/ext/filters/client_channel/method_params.cc',
'src/core/ext/filters/client_channel/parse_address.cc',
'src/core/ext/filters/client_channel/proxy_mapper.cc',
'src/core/ext/filters/client_channel/proxy_mapper_registry.cc',
'src/core/ext/filters/client_channel/resolver.cc',
'src/core/ext/filters/client_channel/resolver_registry.cc',
'src/core/ext/filters/client_channel/retry_throttle.cc',
'src/core/ext/filters/client_channel/subchannel.cc',
'src/core/ext/filters/client_channel/subchannel_index.cc',
'src/core/ext/filters/client_channel/uri_parser.cc',
'src/core/ext/filters/deadline/deadline_filter.cc',
'src/core/tsi/alts_transport_security.cc',
'src/core/tsi/fake_transport_security.cc',
'src/core/tsi/ssl/session_cache/ssl_session_boringssl.cc',
'src/core/tsi/ssl/session_cache/ssl_session_cache.cc',
'src/core/tsi/ssl/session_cache/ssl_session_openssl.cc',
'src/core/tsi/ssl_transport_security.cc',
'src/core/tsi/transport_security_grpc.cc',
'src/core/ext/transport/chttp2/server/chttp2_server.cc',
'src/core/ext/transport/chttp2/client/secure/secure_channel_create.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.cc',
'src/core/ext/transport/inproc/inproc_plugin.cc',
'src/core/ext/transport/inproc/inproc_transport.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel_secure.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c',
'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc',
'src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc',
'src/core/ext/filters/client_channel/lb_policy/subchannel_list.cc',
'src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_fallback.cc',
'src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc',
'src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc',
'src/core/ext/filters/load_reporting/server_load_reporting_filter.cc',
'src/core/ext/filters/load_reporting/server_load_reporting_plugin.cc',
'src/core/ext/census/grpc_context.cc',
'src/core/ext/filters/max_age/max_age_filter.cc',
'src/core/ext/filters/message_size/message_size_filter.cc',
'src/core/ext/filters/workarounds/workaround_cronet_compression_filter.cc',
'src/core/ext/filters/workarounds/workaround_utils.cc',
'src/core/plugin_registry/grpc_plugin_registry.cc',
],
},
{
'target_name': 'grpc_dll',
'type': 'static_library',
'dependencies': [
'gpr',
'grpc',
],
'sources': [
],
},
{
'target_name': 'grpc_test_util',
'type': 'static_library',
'dependencies': [
'gpr_test_util',
'gpr',
'grpc',
],
'sources': [
'test/core/end2end/data/client_certs.cc',
'test/core/end2end/data/server1_cert.cc',
'test/core/end2end/data/server1_key.cc',
'test/core/end2end/data/test_root_cert.cc',
'test/core/security/oauth2_utils.cc',
'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc',
'test/core/end2end/cq_verifier.cc',
'test/core/end2end/fixtures/http_proxy_fixture.cc',
'test/core/end2end/fixtures/proxy.cc',
'test/core/iomgr/endpoint_tests.cc',
'test/core/util/debugger_macros.cc',
'test/core/util/grpc_profiler.cc',
'test/core/util/histogram.cc',
'test/core/util/memory_counters.cc',
'test/core/util/mock_endpoint.cc',
'test/core/util/parse_hexstring.cc',
'test/core/util/passthru_endpoint.cc',
'test/core/util/port.cc',
'test/core/util/port_isolated_runtime_environment.cc',
'test/core/util/port_server_client.cc',
'test/core/util/slice_splitter.cc',
'test/core/util/subprocess_posix.cc',
'test/core/util/subprocess_windows.cc',
'test/core/util/tracer_util.cc',
'test/core/util/trickle_endpoint.cc',
'test/core/util/cmdline.cc',
'src/core/lib/avl/avl.cc',
'src/core/lib/backoff/backoff.cc',
'src/core/lib/channel/channel_args.cc',
'src/core/lib/channel/channel_stack.cc',
'src/core/lib/channel/channel_stack_builder.cc',
'src/core/lib/channel/channel_trace.cc',
'src/core/lib/channel/channel_trace_registry.cc',
'src/core/lib/channel/connected_channel.cc',
'src/core/lib/channel/handshaker.cc',
'src/core/lib/channel/handshaker_factory.cc',
'src/core/lib/channel/handshaker_registry.cc',
'src/core/lib/channel/status_util.cc',
'src/core/lib/compression/compression.cc',
'src/core/lib/compression/compression_internal.cc',
'src/core/lib/compression/message_compress.cc',
'src/core/lib/compression/stream_compression.cc',
'src/core/lib/compression/stream_compression_gzip.cc',
'src/core/lib/compression/stream_compression_identity.cc',
'src/core/lib/debug/stats.cc',
'src/core/lib/debug/stats_data.cc',
'src/core/lib/http/format_request.cc',
'src/core/lib/http/httpcli.cc',
'src/core/lib/http/parser.cc',
'src/core/lib/iomgr/call_combiner.cc',
'src/core/lib/iomgr/combiner.cc',
'src/core/lib/iomgr/endpoint.cc',
'src/core/lib/iomgr/endpoint_pair_posix.cc',
'src/core/lib/iomgr/endpoint_pair_uv.cc',
'src/core/lib/iomgr/endpoint_pair_windows.cc',
'src/core/lib/iomgr/error.cc',
'src/core/lib/iomgr/ev_epoll1_linux.cc',
'src/core/lib/iomgr/ev_epollex_linux.cc',
'src/core/lib/iomgr/ev_epollsig_linux.cc',
'src/core/lib/iomgr/ev_poll_posix.cc',
'src/core/lib/iomgr/ev_posix.cc',
'src/core/lib/iomgr/ev_windows.cc',
'src/core/lib/iomgr/exec_ctx.cc',
'src/core/lib/iomgr/executor.cc',
'src/core/lib/iomgr/fork_posix.cc',
'src/core/lib/iomgr/fork_windows.cc',
'src/core/lib/iomgr/gethostname_fallback.cc',
'src/core/lib/iomgr/gethostname_host_name_max.cc',
'src/core/lib/iomgr/gethostname_sysconf.cc',
'src/core/lib/iomgr/iocp_windows.cc',
'src/core/lib/iomgr/iomgr.cc',
'src/core/lib/iomgr/iomgr_custom.cc',
'src/core/lib/iomgr/iomgr_internal.cc',
'src/core/lib/iomgr/iomgr_posix.cc',
'src/core/lib/iomgr/iomgr_uv.cc',
'src/core/lib/iomgr/iomgr_windows.cc',
'src/core/lib/iomgr/is_epollexclusive_available.cc',
'src/core/lib/iomgr/load_file.cc',
'src/core/lib/iomgr/lockfree_event.cc',
'src/core/lib/iomgr/network_status_tracker.cc',
'src/core/lib/iomgr/polling_entity.cc',
'src/core/lib/iomgr/pollset.cc',
'src/core/lib/iomgr/pollset_custom.cc',
'src/core/lib/iomgr/pollset_set.cc',
'src/core/lib/iomgr/pollset_set_custom.cc',
'src/core/lib/iomgr/pollset_set_windows.cc',
'src/core/lib/iomgr/pollset_uv.cc',
'src/core/lib/iomgr/pollset_windows.cc',
'src/core/lib/iomgr/resolve_address.cc',
'src/core/lib/iomgr/resolve_address_custom.cc',
'src/core/lib/iomgr/resolve_address_posix.cc',
'src/core/lib/iomgr/resolve_address_windows.cc',
'src/core/lib/iomgr/resource_quota.cc',
'src/core/lib/iomgr/sockaddr_utils.cc',
'src/core/lib/iomgr/socket_factory_posix.cc',
'src/core/lib/iomgr/socket_mutator.cc',
'src/core/lib/iomgr/socket_utils_common_posix.cc',
'src/core/lib/iomgr/socket_utils_linux.cc',
'src/core/lib/iomgr/socket_utils_posix.cc',
'src/core/lib/iomgr/socket_utils_uv.cc',
'src/core/lib/iomgr/socket_utils_windows.cc',
'src/core/lib/iomgr/socket_windows.cc',
'src/core/lib/iomgr/tcp_client.cc',
'src/core/lib/iomgr/tcp_client_custom.cc',
'src/core/lib/iomgr/tcp_client_posix.cc',
'src/core/lib/iomgr/tcp_client_windows.cc',
'src/core/lib/iomgr/tcp_custom.cc',
'src/core/lib/iomgr/tcp_posix.cc',
'src/core/lib/iomgr/tcp_server.cc',
'src/core/lib/iomgr/tcp_server_custom.cc',
'src/core/lib/iomgr/tcp_server_posix.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_common.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc',
'src/core/lib/iomgr/tcp_server_windows.cc',
'src/core/lib/iomgr/tcp_uv.cc',
'src/core/lib/iomgr/tcp_windows.cc',
'src/core/lib/iomgr/time_averaged_stats.cc',
'src/core/lib/iomgr/timer.cc',
'src/core/lib/iomgr/timer_custom.cc',
'src/core/lib/iomgr/timer_generic.cc',
'src/core/lib/iomgr/timer_heap.cc',
'src/core/lib/iomgr/timer_manager.cc',
'src/core/lib/iomgr/timer_uv.cc',
'src/core/lib/iomgr/udp_server.cc',
'src/core/lib/iomgr/unix_sockets_posix.cc',
'src/core/lib/iomgr/unix_sockets_posix_noop.cc',
'src/core/lib/iomgr/wakeup_fd_cv.cc',
'src/core/lib/iomgr/wakeup_fd_eventfd.cc',
'src/core/lib/iomgr/wakeup_fd_nospecial.cc',
'src/core/lib/iomgr/wakeup_fd_pipe.cc',
'src/core/lib/iomgr/wakeup_fd_posix.cc',
'src/core/lib/json/json.cc',
'src/core/lib/json/json_reader.cc',
'src/core/lib/json/json_string.cc',
'src/core/lib/json/json_writer.cc',
'src/core/lib/slice/b64.cc',
'src/core/lib/slice/percent_encoding.cc',
'src/core/lib/slice/slice.cc',
'src/core/lib/slice/slice_buffer.cc',
'src/core/lib/slice/slice_intern.cc',
'src/core/lib/slice/slice_string_helpers.cc',
'src/core/lib/surface/api_trace.cc',
'src/core/lib/surface/byte_buffer.cc',
'src/core/lib/surface/byte_buffer_reader.cc',
'src/core/lib/surface/call.cc',
'src/core/lib/surface/call_details.cc',
'src/core/lib/surface/call_log_batch.cc',
'src/core/lib/surface/channel.cc',
'src/core/lib/surface/channel_init.cc',
'src/core/lib/surface/channel_ping.cc',
'src/core/lib/surface/channel_stack_type.cc',
'src/core/lib/surface/completion_queue.cc',
'src/core/lib/surface/completion_queue_factory.cc',
'src/core/lib/surface/event_string.cc',
'src/core/lib/surface/lame_client.cc',
'src/core/lib/surface/metadata_array.cc',
'src/core/lib/surface/server.cc',
'src/core/lib/surface/validate_metadata.cc',
'src/core/lib/surface/version.cc',
'src/core/lib/transport/bdp_estimator.cc',
'src/core/lib/transport/byte_stream.cc',
'src/core/lib/transport/connectivity_state.cc',
'src/core/lib/transport/error_utils.cc',
'src/core/lib/transport/metadata.cc',
'src/core/lib/transport/metadata_batch.cc',
'src/core/lib/transport/pid_controller.cc',
'src/core/lib/transport/service_config.cc',
'src/core/lib/transport/static_metadata.cc',
'src/core/lib/transport/status_conversion.cc',
'src/core/lib/transport/status_metadata.cc',
'src/core/lib/transport/timeout_encoding.cc',
'src/core/lib/transport/transport.cc',
'src/core/lib/transport/transport_op_string.cc',
'src/core/lib/debug/trace.cc',
'src/core/ext/filters/client_channel/backup_poller.cc',
'src/core/ext/filters/client_channel/channel_connectivity.cc',
'src/core/ext/filters/client_channel/client_channel.cc',
'src/core/ext/filters/client_channel/client_channel_factory.cc',
'src/core/ext/filters/client_channel/client_channel_plugin.cc',
'src/core/ext/filters/client_channel/connector.cc',
'src/core/ext/filters/client_channel/http_connect_handshaker.cc',
'src/core/ext/filters/client_channel/http_proxy.cc',
'src/core/ext/filters/client_channel/lb_policy.cc',
'src/core/ext/filters/client_channel/lb_policy_factory.cc',
'src/core/ext/filters/client_channel/lb_policy_registry.cc',
'src/core/ext/filters/client_channel/method_params.cc',
'src/core/ext/filters/client_channel/parse_address.cc',
'src/core/ext/filters/client_channel/proxy_mapper.cc',
'src/core/ext/filters/client_channel/proxy_mapper_registry.cc',
'src/core/ext/filters/client_channel/resolver.cc',
'src/core/ext/filters/client_channel/resolver_registry.cc',
'src/core/ext/filters/client_channel/retry_throttle.cc',
'src/core/ext/filters/client_channel/subchannel.cc',
'src/core/ext/filters/client_channel/subchannel_index.cc',
'src/core/ext/filters/client_channel/uri_parser.cc',
'src/core/ext/filters/deadline/deadline_filter.cc',
'src/core/ext/transport/chttp2/transport/bin_decoder.cc',
'src/core/ext/transport/chttp2/transport/bin_encoder.cc',
'src/core/ext/transport/chttp2/transport/chttp2_plugin.cc',
'src/core/ext/transport/chttp2/transport/chttp2_transport.cc',
'src/core/ext/transport/chttp2/transport/flow_control.cc',
'src/core/ext/transport/chttp2/transport/frame_data.cc',
'src/core/ext/transport/chttp2/transport/frame_goaway.cc',
'src/core/ext/transport/chttp2/transport/frame_ping.cc',
'src/core/ext/transport/chttp2/transport/frame_rst_stream.cc',
'src/core/ext/transport/chttp2/transport/frame_settings.cc',
'src/core/ext/transport/chttp2/transport/frame_window_update.cc',
'src/core/ext/transport/chttp2/transport/hpack_encoder.cc',
'src/core/ext/transport/chttp2/transport/hpack_parser.cc',
'src/core/ext/transport/chttp2/transport/hpack_table.cc',
'src/core/ext/transport/chttp2/transport/http2_settings.cc',
'src/core/ext/transport/chttp2/transport/huffsyms.cc',
'src/core/ext/transport/chttp2/transport/incoming_metadata.cc',
'src/core/ext/transport/chttp2/transport/parsing.cc',
'src/core/ext/transport/chttp2/transport/stream_lists.cc',
'src/core/ext/transport/chttp2/transport/stream_map.cc',
'src/core/ext/transport/chttp2/transport/varint.cc',
'src/core/ext/transport/chttp2/transport/writing.cc',
'src/core/ext/transport/chttp2/alpn/alpn.cc',
'src/core/ext/filters/http/client/http_client_filter.cc',
'src/core/ext/filters/http/http_filters_plugin.cc',
'src/core/ext/filters/http/message_compress/message_compress_filter.cc',
'src/core/ext/filters/http/server/http_server_filter.cc',
],
},
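    # grpc_test_util_unsecure: core test utilities built against the TLS-free
    # grpc_unsecure library (see the 'dependencies' list below).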
{
'target_name': 'grpc_test_util_unsecure',
'type': 'static_library',
'dependencies': [
'gpr',
'gpr_test_util',
'grpc_unsecure',
],
'sources': [
'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc',
'test/core/end2end/cq_verifier.cc',
'test/core/end2end/fixtures/http_proxy_fixture.cc',
'test/core/end2end/fixtures/proxy.cc',
'test/core/iomgr/endpoint_tests.cc',
'test/core/util/debugger_macros.cc',
'test/core/util/grpc_profiler.cc',
'test/core/util/histogram.cc',
'test/core/util/memory_counters.cc',
'test/core/util/mock_endpoint.cc',
'test/core/util/parse_hexstring.cc',
'test/core/util/passthru_endpoint.cc',
'test/core/util/port.cc',
'test/core/util/port_isolated_runtime_environment.cc',
'test/core/util/port_server_client.cc',
'test/core/util/slice_splitter.cc',
'test/core/util/subprocess_posix.cc',
'test/core/util/subprocess_windows.cc',
'test/core/util/tracer_util.cc',
'test/core/util/trickle_endpoint.cc',
'test/core/util/cmdline.cc',
'src/core/lib/avl/avl.cc',
'src/core/lib/backoff/backoff.cc',
'src/core/lib/channel/channel_args.cc',
'src/core/lib/channel/channel_stack.cc',
'src/core/lib/channel/channel_stack_builder.cc',
'src/core/lib/channel/channel_trace.cc',
'src/core/lib/channel/channel_trace_registry.cc',
'src/core/lib/channel/connected_channel.cc',
'src/core/lib/channel/handshaker.cc',
'src/core/lib/channel/handshaker_factory.cc',
'src/core/lib/channel/handshaker_registry.cc',
'src/core/lib/channel/status_util.cc',
'src/core/lib/compression/compression.cc',
'src/core/lib/compression/compression_internal.cc',
'src/core/lib/compression/message_compress.cc',
'src/core/lib/compression/stream_compression.cc',
'src/core/lib/compression/stream_compression_gzip.cc',
'src/core/lib/compression/stream_compression_identity.cc',
'src/core/lib/debug/stats.cc',
'src/core/lib/debug/stats_data.cc',
'src/core/lib/http/format_request.cc',
'src/core/lib/http/httpcli.cc',
'src/core/lib/http/parser.cc',
'src/core/lib/iomgr/call_combiner.cc',
'src/core/lib/iomgr/combiner.cc',
'src/core/lib/iomgr/endpoint.cc',
'src/core/lib/iomgr/endpoint_pair_posix.cc',
'src/core/lib/iomgr/endpoint_pair_uv.cc',
'src/core/lib/iomgr/endpoint_pair_windows.cc',
'src/core/lib/iomgr/error.cc',
'src/core/lib/iomgr/ev_epoll1_linux.cc',
'src/core/lib/iomgr/ev_epollex_linux.cc',
'src/core/lib/iomgr/ev_epollsig_linux.cc',
'src/core/lib/iomgr/ev_poll_posix.cc',
'src/core/lib/iomgr/ev_posix.cc',
'src/core/lib/iomgr/ev_windows.cc',
'src/core/lib/iomgr/exec_ctx.cc',
'src/core/lib/iomgr/executor.cc',
'src/core/lib/iomgr/fork_posix.cc',
'src/core/lib/iomgr/fork_windows.cc',
'src/core/lib/iomgr/gethostname_fallback.cc',
'src/core/lib/iomgr/gethostname_host_name_max.cc',
'src/core/lib/iomgr/gethostname_sysconf.cc',
'src/core/lib/iomgr/iocp_windows.cc',
'src/core/lib/iomgr/iomgr.cc',
'src/core/lib/iomgr/iomgr_custom.cc',
'src/core/lib/iomgr/iomgr_internal.cc',
'src/core/lib/iomgr/iomgr_posix.cc',
'src/core/lib/iomgr/iomgr_uv.cc',
'src/core/lib/iomgr/iomgr_windows.cc',
'src/core/lib/iomgr/is_epollexclusive_available.cc',
'src/core/lib/iomgr/load_file.cc',
'src/core/lib/iomgr/lockfree_event.cc',
'src/core/lib/iomgr/network_status_tracker.cc',
'src/core/lib/iomgr/polling_entity.cc',
'src/core/lib/iomgr/pollset.cc',
'src/core/lib/iomgr/pollset_custom.cc',
'src/core/lib/iomgr/pollset_set.cc',
'src/core/lib/iomgr/pollset_set_custom.cc',
'src/core/lib/iomgr/pollset_set_windows.cc',
'src/core/lib/iomgr/pollset_uv.cc',
'src/core/lib/iomgr/pollset_windows.cc',
'src/core/lib/iomgr/resolve_address.cc',
'src/core/lib/iomgr/resolve_address_custom.cc',
'src/core/lib/iomgr/resolve_address_posix.cc',
'src/core/lib/iomgr/resolve_address_windows.cc',
'src/core/lib/iomgr/resource_quota.cc',
'src/core/lib/iomgr/sockaddr_utils.cc',
'src/core/lib/iomgr/socket_factory_posix.cc',
'src/core/lib/iomgr/socket_mutator.cc',
'src/core/lib/iomgr/socket_utils_common_posix.cc',
'src/core/lib/iomgr/socket_utils_linux.cc',
'src/core/lib/iomgr/socket_utils_posix.cc',
'src/core/lib/iomgr/socket_utils_uv.cc',
'src/core/lib/iomgr/socket_utils_windows.cc',
'src/core/lib/iomgr/socket_windows.cc',
'src/core/lib/iomgr/tcp_client.cc',
'src/core/lib/iomgr/tcp_client_custom.cc',
'src/core/lib/iomgr/tcp_client_posix.cc',
'src/core/lib/iomgr/tcp_client_windows.cc',
'src/core/lib/iomgr/tcp_custom.cc',
'src/core/lib/iomgr/tcp_posix.cc',
'src/core/lib/iomgr/tcp_server.cc',
'src/core/lib/iomgr/tcp_server_custom.cc',
'src/core/lib/iomgr/tcp_server_posix.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_common.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc',
'src/core/lib/iomgr/tcp_server_windows.cc',
'src/core/lib/iomgr/tcp_uv.cc',
'src/core/lib/iomgr/tcp_windows.cc',
'src/core/lib/iomgr/time_averaged_stats.cc',
'src/core/lib/iomgr/timer.cc',
'src/core/lib/iomgr/timer_custom.cc',
'src/core/lib/iomgr/timer_generic.cc',
'src/core/lib/iomgr/timer_heap.cc',
'src/core/lib/iomgr/timer_manager.cc',
'src/core/lib/iomgr/timer_uv.cc',
'src/core/lib/iomgr/udp_server.cc',
'src/core/lib/iomgr/unix_sockets_posix.cc',
'src/core/lib/iomgr/unix_sockets_posix_noop.cc',
'src/core/lib/iomgr/wakeup_fd_cv.cc',
'src/core/lib/iomgr/wakeup_fd_eventfd.cc',
'src/core/lib/iomgr/wakeup_fd_nospecial.cc',
'src/core/lib/iomgr/wakeup_fd_pipe.cc',
'src/core/lib/iomgr/wakeup_fd_posix.cc',
'src/core/lib/json/json.cc',
'src/core/lib/json/json_reader.cc',
'src/core/lib/json/json_string.cc',
'src/core/lib/json/json_writer.cc',
'src/core/lib/slice/b64.cc',
'src/core/lib/slice/percent_encoding.cc',
'src/core/lib/slice/slice.cc',
'src/core/lib/slice/slice_buffer.cc',
'src/core/lib/slice/slice_intern.cc',
'src/core/lib/slice/slice_string_helpers.cc',
'src/core/lib/surface/api_trace.cc',
'src/core/lib/surface/byte_buffer.cc',
'src/core/lib/surface/byte_buffer_reader.cc',
'src/core/lib/surface/call.cc',
'src/core/lib/surface/call_details.cc',
'src/core/lib/surface/call_log_batch.cc',
'src/core/lib/surface/channel.cc',
'src/core/lib/surface/channel_init.cc',
'src/core/lib/surface/channel_ping.cc',
'src/core/lib/surface/channel_stack_type.cc',
'src/core/lib/surface/completion_queue.cc',
'src/core/lib/surface/completion_queue_factory.cc',
'src/core/lib/surface/event_string.cc',
'src/core/lib/surface/lame_client.cc',
'src/core/lib/surface/metadata_array.cc',
'src/core/lib/surface/server.cc',
'src/core/lib/surface/validate_metadata.cc',
'src/core/lib/surface/version.cc',
'src/core/lib/transport/bdp_estimator.cc',
'src/core/lib/transport/byte_stream.cc',
'src/core/lib/transport/connectivity_state.cc',
'src/core/lib/transport/error_utils.cc',
'src/core/lib/transport/metadata.cc',
'src/core/lib/transport/metadata_batch.cc',
'src/core/lib/transport/pid_controller.cc',
'src/core/lib/transport/service_config.cc',
'src/core/lib/transport/static_metadata.cc',
'src/core/lib/transport/status_conversion.cc',
'src/core/lib/transport/status_metadata.cc',
'src/core/lib/transport/timeout_encoding.cc',
'src/core/lib/transport/transport.cc',
'src/core/lib/transport/transport_op_string.cc',
'src/core/lib/debug/trace.cc',
'src/core/ext/filters/client_channel/backup_poller.cc',
'src/core/ext/filters/client_channel/channel_connectivity.cc',
'src/core/ext/filters/client_channel/client_channel.cc',
'src/core/ext/filters/client_channel/client_channel_factory.cc',
'src/core/ext/filters/client_channel/client_channel_plugin.cc',
'src/core/ext/filters/client_channel/connector.cc',
'src/core/ext/filters/client_channel/http_connect_handshaker.cc',
'src/core/ext/filters/client_channel/http_proxy.cc',
'src/core/ext/filters/client_channel/lb_policy.cc',
'src/core/ext/filters/client_channel/lb_policy_factory.cc',
'src/core/ext/filters/client_channel/lb_policy_registry.cc',
'src/core/ext/filters/client_channel/method_params.cc',
'src/core/ext/filters/client_channel/parse_address.cc',
'src/core/ext/filters/client_channel/proxy_mapper.cc',
'src/core/ext/filters/client_channel/proxy_mapper_registry.cc',
'src/core/ext/filters/client_channel/resolver.cc',
'src/core/ext/filters/client_channel/resolver_registry.cc',
'src/core/ext/filters/client_channel/retry_throttle.cc',
'src/core/ext/filters/client_channel/subchannel.cc',
'src/core/ext/filters/client_channel/subchannel_index.cc',
'src/core/ext/filters/client_channel/uri_parser.cc',
'src/core/ext/filters/deadline/deadline_filter.cc',
'src/core/ext/transport/chttp2/transport/bin_decoder.cc',
'src/core/ext/transport/chttp2/transport/bin_encoder.cc',
'src/core/ext/transport/chttp2/transport/chttp2_plugin.cc',
'src/core/ext/transport/chttp2/transport/chttp2_transport.cc',
'src/core/ext/transport/chttp2/transport/flow_control.cc',
'src/core/ext/transport/chttp2/transport/frame_data.cc',
'src/core/ext/transport/chttp2/transport/frame_goaway.cc',
'src/core/ext/transport/chttp2/transport/frame_ping.cc',
'src/core/ext/transport/chttp2/transport/frame_rst_stream.cc',
'src/core/ext/transport/chttp2/transport/frame_settings.cc',
'src/core/ext/transport/chttp2/transport/frame_window_update.cc',
'src/core/ext/transport/chttp2/transport/hpack_encoder.cc',
'src/core/ext/transport/chttp2/transport/hpack_parser.cc',
'src/core/ext/transport/chttp2/transport/hpack_table.cc',
'src/core/ext/transport/chttp2/transport/http2_settings.cc',
'src/core/ext/transport/chttp2/transport/huffsyms.cc',
'src/core/ext/transport/chttp2/transport/incoming_metadata.cc',
'src/core/ext/transport/chttp2/transport/parsing.cc',
'src/core/ext/transport/chttp2/transport/stream_lists.cc',
'src/core/ext/transport/chttp2/transport/stream_map.cc',
'src/core/ext/transport/chttp2/transport/varint.cc',
'src/core/ext/transport/chttp2/transport/writing.cc',
'src/core/ext/transport/chttp2/alpn/alpn.cc',
'src/core/ext/filters/http/client/http_client_filter.cc',
'src/core/ext/filters/http/http_filters_plugin.cc',
'src/core/ext/filters/http/message_compress/message_compress_filter.cc',
'src/core/ext/filters/http/server/http_server_filter.cc',
],
},
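    # grpc_unsecure: the core gRPC C library built without TLS/SSL support
    # (initialized via init_unsecure.cc; depends only on gpr).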
{
'target_name': 'grpc_unsecure',
'type': 'static_library',
'dependencies': [
'gpr',
],
'sources': [
'src/core/lib/surface/init.cc',
'src/core/lib/surface/init_unsecure.cc',
'src/core/lib/avl/avl.cc',
'src/core/lib/backoff/backoff.cc',
'src/core/lib/channel/channel_args.cc',
'src/core/lib/channel/channel_stack.cc',
'src/core/lib/channel/channel_stack_builder.cc',
'src/core/lib/channel/channel_trace.cc',
'src/core/lib/channel/channel_trace_registry.cc',
'src/core/lib/channel/connected_channel.cc',
'src/core/lib/channel/handshaker.cc',
'src/core/lib/channel/handshaker_factory.cc',
'src/core/lib/channel/handshaker_registry.cc',
'src/core/lib/channel/status_util.cc',
'src/core/lib/compression/compression.cc',
'src/core/lib/compression/compression_internal.cc',
'src/core/lib/compression/message_compress.cc',
'src/core/lib/compression/stream_compression.cc',
'src/core/lib/compression/stream_compression_gzip.cc',
'src/core/lib/compression/stream_compression_identity.cc',
'src/core/lib/debug/stats.cc',
'src/core/lib/debug/stats_data.cc',
'src/core/lib/http/format_request.cc',
'src/core/lib/http/httpcli.cc',
'src/core/lib/http/parser.cc',
'src/core/lib/iomgr/call_combiner.cc',
'src/core/lib/iomgr/combiner.cc',
'src/core/lib/iomgr/endpoint.cc',
'src/core/lib/iomgr/endpoint_pair_posix.cc',
'src/core/lib/iomgr/endpoint_pair_uv.cc',
'src/core/lib/iomgr/endpoint_pair_windows.cc',
'src/core/lib/iomgr/error.cc',
'src/core/lib/iomgr/ev_epoll1_linux.cc',
'src/core/lib/iomgr/ev_epollex_linux.cc',
'src/core/lib/iomgr/ev_epollsig_linux.cc',
'src/core/lib/iomgr/ev_poll_posix.cc',
'src/core/lib/iomgr/ev_posix.cc',
'src/core/lib/iomgr/ev_windows.cc',
'src/core/lib/iomgr/exec_ctx.cc',
'src/core/lib/iomgr/executor.cc',
'src/core/lib/iomgr/fork_posix.cc',
'src/core/lib/iomgr/fork_windows.cc',
'src/core/lib/iomgr/gethostname_fallback.cc',
'src/core/lib/iomgr/gethostname_host_name_max.cc',
'src/core/lib/iomgr/gethostname_sysconf.cc',
'src/core/lib/iomgr/iocp_windows.cc',
'src/core/lib/iomgr/iomgr.cc',
'src/core/lib/iomgr/iomgr_custom.cc',
'src/core/lib/iomgr/iomgr_internal.cc',
'src/core/lib/iomgr/iomgr_posix.cc',
'src/core/lib/iomgr/iomgr_uv.cc',
'src/core/lib/iomgr/iomgr_windows.cc',
'src/core/lib/iomgr/is_epollexclusive_available.cc',
'src/core/lib/iomgr/load_file.cc',
'src/core/lib/iomgr/lockfree_event.cc',
'src/core/lib/iomgr/network_status_tracker.cc',
'src/core/lib/iomgr/polling_entity.cc',
'src/core/lib/iomgr/pollset.cc',
'src/core/lib/iomgr/pollset_custom.cc',
'src/core/lib/iomgr/pollset_set.cc',
'src/core/lib/iomgr/pollset_set_custom.cc',
'src/core/lib/iomgr/pollset_set_windows.cc',
'src/core/lib/iomgr/pollset_uv.cc',
'src/core/lib/iomgr/pollset_windows.cc',
'src/core/lib/iomgr/resolve_address.cc',
'src/core/lib/iomgr/resolve_address_custom.cc',
'src/core/lib/iomgr/resolve_address_posix.cc',
'src/core/lib/iomgr/resolve_address_windows.cc',
'src/core/lib/iomgr/resource_quota.cc',
'src/core/lib/iomgr/sockaddr_utils.cc',
'src/core/lib/iomgr/socket_factory_posix.cc',
'src/core/lib/iomgr/socket_mutator.cc',
'src/core/lib/iomgr/socket_utils_common_posix.cc',
'src/core/lib/iomgr/socket_utils_linux.cc',
'src/core/lib/iomgr/socket_utils_posix.cc',
'src/core/lib/iomgr/socket_utils_uv.cc',
'src/core/lib/iomgr/socket_utils_windows.cc',
'src/core/lib/iomgr/socket_windows.cc',
'src/core/lib/iomgr/tcp_client.cc',
'src/core/lib/iomgr/tcp_client_custom.cc',
'src/core/lib/iomgr/tcp_client_posix.cc',
'src/core/lib/iomgr/tcp_client_windows.cc',
'src/core/lib/iomgr/tcp_custom.cc',
'src/core/lib/iomgr/tcp_posix.cc',
'src/core/lib/iomgr/tcp_server.cc',
'src/core/lib/iomgr/tcp_server_custom.cc',
'src/core/lib/iomgr/tcp_server_posix.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_common.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_ifaddrs.cc',
'src/core/lib/iomgr/tcp_server_utils_posix_noifaddrs.cc',
'src/core/lib/iomgr/tcp_server_windows.cc',
'src/core/lib/iomgr/tcp_uv.cc',
'src/core/lib/iomgr/tcp_windows.cc',
'src/core/lib/iomgr/time_averaged_stats.cc',
'src/core/lib/iomgr/timer.cc',
'src/core/lib/iomgr/timer_custom.cc',
'src/core/lib/iomgr/timer_generic.cc',
'src/core/lib/iomgr/timer_heap.cc',
'src/core/lib/iomgr/timer_manager.cc',
'src/core/lib/iomgr/timer_uv.cc',
'src/core/lib/iomgr/udp_server.cc',
'src/core/lib/iomgr/unix_sockets_posix.cc',
'src/core/lib/iomgr/unix_sockets_posix_noop.cc',
'src/core/lib/iomgr/wakeup_fd_cv.cc',
'src/core/lib/iomgr/wakeup_fd_eventfd.cc',
'src/core/lib/iomgr/wakeup_fd_nospecial.cc',
'src/core/lib/iomgr/wakeup_fd_pipe.cc',
'src/core/lib/iomgr/wakeup_fd_posix.cc',
'src/core/lib/json/json.cc',
'src/core/lib/json/json_reader.cc',
'src/core/lib/json/json_string.cc',
'src/core/lib/json/json_writer.cc',
'src/core/lib/slice/b64.cc',
'src/core/lib/slice/percent_encoding.cc',
'src/core/lib/slice/slice.cc',
'src/core/lib/slice/slice_buffer.cc',
'src/core/lib/slice/slice_intern.cc',
'src/core/lib/slice/slice_string_helpers.cc',
'src/core/lib/surface/api_trace.cc',
'src/core/lib/surface/byte_buffer.cc',
'src/core/lib/surface/byte_buffer_reader.cc',
'src/core/lib/surface/call.cc',
'src/core/lib/surface/call_details.cc',
'src/core/lib/surface/call_log_batch.cc',
'src/core/lib/surface/channel.cc',
'src/core/lib/surface/channel_init.cc',
'src/core/lib/surface/channel_ping.cc',
'src/core/lib/surface/channel_stack_type.cc',
'src/core/lib/surface/completion_queue.cc',
'src/core/lib/surface/completion_queue_factory.cc',
'src/core/lib/surface/event_string.cc',
'src/core/lib/surface/lame_client.cc',
'src/core/lib/surface/metadata_array.cc',
'src/core/lib/surface/server.cc',
'src/core/lib/surface/validate_metadata.cc',
'src/core/lib/surface/version.cc',
'src/core/lib/transport/bdp_estimator.cc',
'src/core/lib/transport/byte_stream.cc',
'src/core/lib/transport/connectivity_state.cc',
'src/core/lib/transport/error_utils.cc',
'src/core/lib/transport/metadata.cc',
'src/core/lib/transport/metadata_batch.cc',
'src/core/lib/transport/pid_controller.cc',
'src/core/lib/transport/service_config.cc',
'src/core/lib/transport/static_metadata.cc',
'src/core/lib/transport/status_conversion.cc',
'src/core/lib/transport/status_metadata.cc',
'src/core/lib/transport/timeout_encoding.cc',
'src/core/lib/transport/transport.cc',
'src/core/lib/transport/transport_op_string.cc',
'src/core/lib/debug/trace.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2.cc',
'src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.cc',
'src/core/ext/transport/chttp2/transport/bin_decoder.cc',
'src/core/ext/transport/chttp2/transport/bin_encoder.cc',
'src/core/ext/transport/chttp2/transport/chttp2_plugin.cc',
'src/core/ext/transport/chttp2/transport/chttp2_transport.cc',
'src/core/ext/transport/chttp2/transport/flow_control.cc',
'src/core/ext/transport/chttp2/transport/frame_data.cc',
'src/core/ext/transport/chttp2/transport/frame_goaway.cc',
'src/core/ext/transport/chttp2/transport/frame_ping.cc',
'src/core/ext/transport/chttp2/transport/frame_rst_stream.cc',
'src/core/ext/transport/chttp2/transport/frame_settings.cc',
'src/core/ext/transport/chttp2/transport/frame_window_update.cc',
'src/core/ext/transport/chttp2/transport/hpack_encoder.cc',
'src/core/ext/transport/chttp2/transport/hpack_parser.cc',
'src/core/ext/transport/chttp2/transport/hpack_table.cc',
'src/core/ext/transport/chttp2/transport/http2_settings.cc',
'src/core/ext/transport/chttp2/transport/huffsyms.cc',
'src/core/ext/transport/chttp2/transport/incoming_metadata.cc',
'src/core/ext/transport/chttp2/transport/parsing.cc',
'src/core/ext/transport/chttp2/transport/stream_lists.cc',
'src/core/ext/transport/chttp2/transport/stream_map.cc',
'src/core/ext/transport/chttp2/transport/varint.cc',
'src/core/ext/transport/chttp2/transport/writing.cc',
'src/core/ext/transport/chttp2/alpn/alpn.cc',
'src/core/ext/filters/http/client/http_client_filter.cc',
'src/core/ext/filters/http/http_filters_plugin.cc',
'src/core/ext/filters/http/message_compress/message_compress_filter.cc',
'src/core/ext/filters/http/server/http_server_filter.cc',
'src/core/ext/transport/chttp2/server/chttp2_server.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create.cc',
'src/core/ext/transport/chttp2/client/insecure/channel_create_posix.cc',
'src/core/ext/transport/chttp2/client/chttp2_connector.cc',
'src/core/ext/filters/client_channel/backup_poller.cc',
'src/core/ext/filters/client_channel/channel_connectivity.cc',
'src/core/ext/filters/client_channel/client_channel.cc',
'src/core/ext/filters/client_channel/client_channel_factory.cc',
'src/core/ext/filters/client_channel/client_channel_plugin.cc',
'src/core/ext/filters/client_channel/connector.cc',
'src/core/ext/filters/client_channel/http_connect_handshaker.cc',
'src/core/ext/filters/client_channel/http_proxy.cc',
'src/core/ext/filters/client_channel/lb_policy.cc',
'src/core/ext/filters/client_channel/lb_policy_factory.cc',
'src/core/ext/filters/client_channel/lb_policy_registry.cc',
'src/core/ext/filters/client_channel/method_params.cc',
'src/core/ext/filters/client_channel/parse_address.cc',
'src/core/ext/filters/client_channel/proxy_mapper.cc',
'src/core/ext/filters/client_channel/proxy_mapper_registry.cc',
'src/core/ext/filters/client_channel/resolver.cc',
'src/core/ext/filters/client_channel/resolver_registry.cc',
'src/core/ext/filters/client_channel/retry_throttle.cc',
'src/core/ext/filters/client_channel/subchannel.cc',
'src/core/ext/filters/client_channel/subchannel_index.cc',
'src/core/ext/filters/client_channel/uri_parser.cc',
'src/core/ext/filters/deadline/deadline_filter.cc',
'src/core/ext/transport/inproc/inproc_plugin.cc',
'src/core/ext/transport/inproc/inproc_transport.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/dns_resolver_ares.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_ev_driver_posix.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper.cc',
'src/core/ext/filters/client_channel/resolver/dns/c_ares/grpc_ares_wrapper_fallback.cc',
'src/core/ext/filters/client_channel/resolver/dns/native/dns_resolver.cc',
'src/core/ext/filters/client_channel/resolver/sockaddr/sockaddr_resolver.cc',
'src/core/ext/filters/client_channel/resolver/fake/fake_resolver.cc',
'src/core/ext/filters/load_reporting/server_load_reporting_filter.cc',
'src/core/ext/filters/load_reporting/server_load_reporting_plugin.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/client_load_reporting_filter.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c',
'third_party/nanopb/pb_common.c',
'third_party/nanopb/pb_decode.c',
'third_party/nanopb/pb_encode.c',
'src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc',
'src/core/ext/filters/client_channel/lb_policy/subchannel_list.cc',
'src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc',
'src/core/ext/census/grpc_context.cc',
'src/core/ext/filters/max_age/max_age_filter.cc',
'src/core/ext/filters/message_size/message_size_filter.cc',
'src/core/ext/filters/workarounds/workaround_cronet_compression_filter.cc',
'src/core/ext/filters/workarounds/workaround_utils.cc',
'src/core/plugin_registry/grpc_unsecure_plugin_registry.cc',
],
},
{
'target_name': 'reconnect_server',
'type': 'static_library',
'dependencies': [
'test_tcp_server',
'grpc_test_util',
'grpc',
'gpr_test_util',
'gpr',
],
'sources': [
'test/core/util/reconnect_server.cc',
],
},
{
'target_name': 'test_tcp_server',
'type': 'static_library',
'dependencies': [
'grpc_test_util',
'grpc',
'gpr_test_util',
'gpr',
],
'sources': [
'test/core/util/test_tcp_server.cc',
],
},
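    # grpc++: the C++ API layer, including the secure credentials and
    # auth-context sources.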
{
'target_name': 'grpc++',
'type': 'static_library',
'dependencies': [
'grpc',
'gpr',
],
'sources': [
'src/cpp/client/insecure_credentials.cc',
'src/cpp/client/secure_credentials.cc',
'src/cpp/common/auth_property_iterator.cc',
'src/cpp/common/secure_auth_context.cc',
'src/cpp/common/secure_channel_arguments.cc',
'src/cpp/common/secure_create_auth_context.cc',
'src/cpp/server/insecure_server_credentials.cc',
'src/cpp/server/secure_server_credentials.cc',
'src/cpp/client/channel_cc.cc',
'src/cpp/client/client_context.cc',
'src/cpp/client/create_channel.cc',
'src/cpp/client/create_channel_internal.cc',
'src/cpp/client/create_channel_posix.cc',
'src/cpp/client/credentials_cc.cc',
'src/cpp/client/generic_stub.cc',
'src/cpp/common/alarm.cc',
'src/cpp/common/channel_arguments.cc',
'src/cpp/common/channel_filter.cc',
'src/cpp/common/completion_queue_cc.cc',
'src/cpp/common/core_codegen.cc',
'src/cpp/common/resource_quota_cc.cc',
'src/cpp/common/rpc_method.cc',
'src/cpp/common/version_cc.cc',
'src/cpp/server/async_generic_service.cc',
'src/cpp/server/channel_argument_option.cc',
'src/cpp/server/create_default_thread_pool.cc',
'src/cpp/server/dynamic_thread_pool.cc',
'src/cpp/server/health/default_health_check_service.cc',
'src/cpp/server/health/health.pb.c',
'src/cpp/server/health/health_check_service.cc',
'src/cpp/server/health/health_check_service_server_builder_option.cc',
'src/cpp/server/server_builder.cc',
'src/cpp/server/server_cc.cc',
'src/cpp/server/server_context.cc',
'src/cpp/server/server_credentials.cc',
'src/cpp/server/server_posix.cc',
'src/cpp/thread_manager/thread_manager.cc',
'src/cpp/util/byte_buffer_cc.cc',
'src/cpp/util/slice_cc.cc',
'src/cpp/util/status.cc',
'src/cpp/util/string_ref.cc',
'src/cpp/util/time_cc.cc',
'src/cpp/codegen/codegen_init.cc',
],
},
{
'target_name': 'grpc++_core_stats',
'type': 'static_library',
'dependencies': [
'grpc++',
],
'sources': [
'src/proto/grpc/core/stats.proto',
'src/cpp/util/core_stats.cc',
],
},
{
'target_name': 'grpc++_error_details',
'type': 'static_library',
'dependencies': [
'grpc++',
],
'sources': [
'src/proto/grpc/status/status.proto',
'src/cpp/util/error_details.cc',
],
},
{
'target_name': 'grpc++_proto_reflection_desc_db',
'type': 'static_library',
'dependencies': [
'grpc++',
'grpc',
],
'sources': [
'test/cpp/util/proto_reflection_descriptor_database.cc',
'src/proto/grpc/reflection/v1alpha/reflection.proto',
],
},
{
'target_name': 'grpc++_reflection',
'type': 'static_library',
'dependencies': [
'grpc++',
'grpc',
],
'sources': [
'src/cpp/ext/proto_server_reflection.cc',
'src/cpp/ext/proto_server_reflection_plugin.cc',
'src/proto/grpc/reflection/v1alpha/reflection.proto',
],
},
{
'target_name': 'grpc++_test_config',
'type': 'static_library',
'dependencies': [
],
'sources': [
'test/cpp/util/test_config_cc.cc',
],
},
{
'target_name': 'grpc++_test_util',
'type': 'static_library',
'dependencies': [
'grpc++',
'grpc_test_util',
'grpc',
],
'sources': [
'src/proto/grpc/channelz/channelz.proto',
'src/proto/grpc/health/v1/health.proto',
'src/proto/grpc/testing/echo_messages.proto',
'src/proto/grpc/testing/echo.proto',
'src/proto/grpc/testing/duplicate/echo_duplicate.proto',
'test/cpp/end2end/test_service_impl.cc',
'test/cpp/util/byte_buffer_proto_helper.cc',
'test/cpp/util/channel_trace_proto_helper.cc',
'test/cpp/util/create_test_channel.cc',
'test/cpp/util/string_ref_helper.cc',
'test/cpp/util/subprocess.cc',
'test/cpp/util/test_credentials_provider.cc',
'src/cpp/codegen/codegen_init.cc',
],
},
{
'target_name': 'grpc++_test_util_unsecure',
'type': 'static_library',
'dependencies': [
'grpc++_unsecure',
'grpc_test_util_unsecure',
'grpc_unsecure',
],
'sources': [
'src/proto/grpc/health/v1/health.proto',
'src/proto/grpc/testing/echo_messages.proto',
'src/proto/grpc/testing/echo.proto',
'src/proto/grpc/testing/duplicate/echo_duplicate.proto',
'test/cpp/end2end/test_service_impl.cc',
'test/cpp/util/byte_buffer_proto_helper.cc',
'test/cpp/util/string_ref_helper.cc',
'test/cpp/util/subprocess.cc',
'src/cpp/codegen/codegen_init.cc',
],
},
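    # grpc++_unsecure: the C++ API layer built against grpc_unsecure,
    # with insecure credentials only.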
{
'target_name': 'grpc++_unsecure',
'type': 'static_library',
'dependencies': [
'gpr',
'grpc_unsecure',
],
'sources': [
'src/cpp/client/insecure_credentials.cc',
'src/cpp/common/insecure_create_auth_context.cc',
'src/cpp/server/insecure_server_credentials.cc',
'src/cpp/client/channel_cc.cc',
'src/cpp/client/client_context.cc',
'src/cpp/client/create_channel.cc',
'src/cpp/client/create_channel_internal.cc',
'src/cpp/client/create_channel_posix.cc',
'src/cpp/client/credentials_cc.cc',
'src/cpp/client/generic_stub.cc',
'src/cpp/common/alarm.cc',
'src/cpp/common/channel_arguments.cc',
'src/cpp/common/channel_filter.cc',
'src/cpp/common/completion_queue_cc.cc',
'src/cpp/common/core_codegen.cc',
'src/cpp/common/resource_quota_cc.cc',
'src/cpp/common/rpc_method.cc',
'src/cpp/common/version_cc.cc',
'src/cpp/server/async_generic_service.cc',
'src/cpp/server/channel_argument_option.cc',
'src/cpp/server/create_default_thread_pool.cc',
'src/cpp/server/dynamic_thread_pool.cc',
'src/cpp/server/health/default_health_check_service.cc',
'src/cpp/server/health/health.pb.c',
'src/cpp/server/health/health_check_service.cc',
'src/cpp/server/health/health_check_service_server_builder_option.cc',
'src/cpp/server/server_builder.cc',
'src/cpp/server/server_cc.cc',
'src/cpp/server/server_context.cc',
'src/cpp/server/server_credentials.cc',
'src/cpp/server/server_posix.cc',
'src/cpp/thread_manager/thread_manager.cc',
'src/cpp/util/byte_buffer_cc.cc',
'src/cpp/util/slice_cc.cc',
'src/cpp/util/status.cc',
'src/cpp/util/string_ref.cc',
'src/cpp/util/time_cc.cc',
'src/cpp/codegen/codegen_init.cc',
],
},
{
'target_name': 'grpc_benchmark',
'type': 'static_library',
'dependencies': [
'benchmark',
'grpc++_unsecure',
'grpc_test_util_unsecure',
'grpc_unsecure',
],
'sources': [
'test/cpp/microbenchmarks/helpers.cc',
],
},
{
'target_name': 'grpc_cli_libs',
'type': 'static_library',
'dependencies': [
'grpc++_proto_reflection_desc_db',
'grpc++',
'grpc',
],
'sources': [
'test/cpp/util/cli_call.cc',
'test/cpp/util/cli_credentials.cc',
'test/cpp/util/grpc_tool.cc',
'test/cpp/util/proto_file_parser.cc',
'test/cpp/util/service_describer.cc',
'src/proto/grpc/reflection/v1alpha/reflection.proto',
],
},
{
'target_name': 'grpc_plugin_support',
'type': 'static_library',
'dependencies': [
],
'sources': [
'src/compiler/cpp_generator.cc',
'src/compiler/csharp_generator.cc',
'src/compiler/node_generator.cc',
'src/compiler/objective_c_generator.cc',
'src/compiler/php_generator.cc',
'src/compiler/python_generator.cc',
'src/compiler/ruby_generator.cc',
],
},
{
'target_name': 'http2_client_main',
'type': 'static_library',
'dependencies': [
'grpc++_test_util',
'grpc_test_util',
'grpc++',
'grpc',
'grpc++_test_config',
],
'sources': [
'src/proto/grpc/testing/empty.proto',
'src/proto/grpc/testing/messages.proto',
'src/proto/grpc/testing/test.proto',
'test/cpp/interop/http2_client.cc',
],
},
{
'target_name': 'interop_client_helper',
'type': 'static_library',
'dependencies': [
'grpc++_test_util',
'grpc_test_util',
'grpc++',
'grpc',
'gpr',
],
'sources': [
'src/proto/grpc/testing/messages.proto',
'test/cpp/interop/client_helper.cc',
],
},
{
'target_name': 'interop_client_main',
'type': 'static_library',
'dependencies': [
'interop_client_helper',
'grpc++_test_util',
'grpc_test_util',
'grpc++',
'grpc',
'gpr_test_util',
'gpr',
'grpc++_test_config',
],
'sources': [
'src/proto/grpc/testing/empty.proto',
'src/proto/grpc/testing/messages.proto',
'src/proto/grpc/testing/test.proto',
'test/cpp/interop/client.cc',
'test/cpp/interop/interop_client.cc',
],
},
{
'target_name': 'interop_server_helper',
'type': 'static_library',
'dependencies': [
'grpc++_test_util',
'grpc_test_util',
'grpc++',
'grpc',
'gpr',
],
'sources': [
'test/cpp/interop/server_helper.cc',
],
},
{
'target_name': 'interop_server_lib',
'type': 'static_library',
'dependencies': [
'interop_server_helper',
'grpc++_test_util',
'grpc_test_util',
'grpc++',
'grpc',
'gpr_test_util',
'gpr',
'grpc++_test_config',
],
'sources': [
'src/proto/grpc/testing/empty.proto',
'src/proto/grpc/testing/messages.proto',
'src/proto/grpc/testing/test.proto',
'test/cpp/interop/interop_server.cc',
],
},
{
'target_name': 'interop_server_main',
'type': 'static_library',
'dependencies': [
'interop_server_lib',
],
'sources': [
'test/cpp/interop/interop_server_bootstrap.cc',
],
},
{
'target_name': 'qps',
'type': 'static_library',
'dependencies': [
'grpc_test_util',
'grpc++_test_util',
'grpc++_core_stats',
'grpc++',
'grpc',
],
'sources': [
'src/proto/grpc/testing/messages.proto',
'src/proto/grpc/testing/payloads.proto',
'src/proto/grpc/testing/stats.proto',
'src/proto/grpc/testing/control.proto',
'src/proto/grpc/testing/services.proto',
'test/cpp/qps/benchmark_config.cc',
'test/cpp/qps/client_async.cc',
'test/cpp/qps/client_sync.cc',
'test/cpp/qps/driver.cc',
'test/cpp/qps/parse_json.cc',
'test/cpp/qps/qps_worker.cc',
'test/cpp/qps/report.cc',
'test/cpp/qps/server_async.cc',
'test/cpp/qps/server_sync.cc',
'test/cpp/qps/usage_timer.cc',
],
},
{
'target_name': 'grpc_csharp_ext',
'type': 'static_library',
'dependencies': [
'grpc',
'gpr',
],
'sources': [
'src/csharp/ext/grpc_csharp_ext.c',
],
},
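    # boringssl: vendored BoringSSL crypto and SSL sources from third_party.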
{
'target_name': 'boringssl',
'type': 'static_library',
'dependencies': [
],
'sources': [
'src/boringssl/err_data.c',
'third_party/boringssl/crypto/asn1/a_bitstr.c',
'third_party/boringssl/crypto/asn1/a_bool.c',
'third_party/boringssl/crypto/asn1/a_d2i_fp.c',
'third_party/boringssl/crypto/asn1/a_dup.c',
'third_party/boringssl/crypto/asn1/a_enum.c',
'third_party/boringssl/crypto/asn1/a_gentm.c',
'third_party/boringssl/crypto/asn1/a_i2d_fp.c',
'third_party/boringssl/crypto/asn1/a_int.c',
'third_party/boringssl/crypto/asn1/a_mbstr.c',
'third_party/boringssl/crypto/asn1/a_object.c',
'third_party/boringssl/crypto/asn1/a_octet.c',
'third_party/boringssl/crypto/asn1/a_print.c',
'third_party/boringssl/crypto/asn1/a_strnid.c',
'third_party/boringssl/crypto/asn1/a_time.c',
'third_party/boringssl/crypto/asn1/a_type.c',
'third_party/boringssl/crypto/asn1/a_utctm.c',
'third_party/boringssl/crypto/asn1/a_utf8.c',
'third_party/boringssl/crypto/asn1/asn1_lib.c',
'third_party/boringssl/crypto/asn1/asn1_par.c',
'third_party/boringssl/crypto/asn1/asn_pack.c',
'third_party/boringssl/crypto/asn1/f_enum.c',
'third_party/boringssl/crypto/asn1/f_int.c',
'third_party/boringssl/crypto/asn1/f_string.c',
'third_party/boringssl/crypto/asn1/tasn_dec.c',
'third_party/boringssl/crypto/asn1/tasn_enc.c',
'third_party/boringssl/crypto/asn1/tasn_fre.c',
'third_party/boringssl/crypto/asn1/tasn_new.c',
'third_party/boringssl/crypto/asn1/tasn_typ.c',
'third_party/boringssl/crypto/asn1/tasn_utl.c',
'third_party/boringssl/crypto/asn1/time_support.c',
'third_party/boringssl/crypto/base64/base64.c',
'third_party/boringssl/crypto/bio/bio.c',
'third_party/boringssl/crypto/bio/bio_mem.c',
'third_party/boringssl/crypto/bio/connect.c',
'third_party/boringssl/crypto/bio/fd.c',
'third_party/boringssl/crypto/bio/file.c',
'third_party/boringssl/crypto/bio/hexdump.c',
'third_party/boringssl/crypto/bio/pair.c',
'third_party/boringssl/crypto/bio/printf.c',
'third_party/boringssl/crypto/bio/socket.c',
'third_party/boringssl/crypto/bio/socket_helper.c',
'third_party/boringssl/crypto/bn_extra/bn_asn1.c',
'third_party/boringssl/crypto/bn_extra/convert.c',
'third_party/boringssl/crypto/buf/buf.c',
'third_party/boringssl/crypto/bytestring/asn1_compat.c',
'third_party/boringssl/crypto/bytestring/ber.c',
'third_party/boringssl/crypto/bytestring/cbb.c',
'third_party/boringssl/crypto/bytestring/cbs.c',
'third_party/boringssl/crypto/chacha/chacha.c',
'third_party/boringssl/crypto/cipher_extra/cipher_extra.c',
'third_party/boringssl/crypto/cipher_extra/derive_key.c',
'third_party/boringssl/crypto/cipher_extra/e_aesctrhmac.c',
'third_party/boringssl/crypto/cipher_extra/e_aesgcmsiv.c',
'third_party/boringssl/crypto/cipher_extra/e_chacha20poly1305.c',
'third_party/boringssl/crypto/cipher_extra/e_null.c',
'third_party/boringssl/crypto/cipher_extra/e_rc2.c',
'third_party/boringssl/crypto/cipher_extra/e_rc4.c',
'third_party/boringssl/crypto/cipher_extra/e_ssl3.c',
'third_party/boringssl/crypto/cipher_extra/e_tls.c',
'third_party/boringssl/crypto/cipher_extra/tls_cbc.c',
'third_party/boringssl/crypto/cmac/cmac.c',
'third_party/boringssl/crypto/conf/conf.c',
'third_party/boringssl/crypto/cpu-aarch64-linux.c',
'third_party/boringssl/crypto/cpu-arm-linux.c',
'third_party/boringssl/crypto/cpu-arm.c',
'third_party/boringssl/crypto/cpu-intel.c',
'third_party/boringssl/crypto/cpu-ppc64le.c',
'third_party/boringssl/crypto/crypto.c',
'third_party/boringssl/crypto/curve25519/spake25519.c',
'third_party/boringssl/crypto/curve25519/x25519-x86_64.c',
'third_party/boringssl/crypto/dh/check.c',
'third_party/boringssl/crypto/dh/dh.c',
'third_party/boringssl/crypto/dh/dh_asn1.c',
'third_party/boringssl/crypto/dh/params.c',
'third_party/boringssl/crypto/digest_extra/digest_extra.c',
'third_party/boringssl/crypto/dsa/dsa.c',
'third_party/boringssl/crypto/dsa/dsa_asn1.c',
'third_party/boringssl/crypto/ec_extra/ec_asn1.c',
'third_party/boringssl/crypto/ecdh/ecdh.c',
'third_party/boringssl/crypto/ecdsa_extra/ecdsa_asn1.c',
'third_party/boringssl/crypto/engine/engine.c',
'third_party/boringssl/crypto/err/err.c',
'third_party/boringssl/crypto/evp/digestsign.c',
'third_party/boringssl/crypto/evp/evp.c',
'third_party/boringssl/crypto/evp/evp_asn1.c',
'third_party/boringssl/crypto/evp/evp_ctx.c',
'third_party/boringssl/crypto/evp/p_dsa_asn1.c',
'third_party/boringssl/crypto/evp/p_ec.c',
'third_party/boringssl/crypto/evp/p_ec_asn1.c',
'third_party/boringssl/crypto/evp/p_ed25519.c',
'third_party/boringssl/crypto/evp/p_ed25519_asn1.c',
'third_party/boringssl/crypto/evp/p_rsa.c',
'third_party/boringssl/crypto/evp/p_rsa_asn1.c',
'third_party/boringssl/crypto/evp/pbkdf.c',
'third_party/boringssl/crypto/evp/print.c',
'third_party/boringssl/crypto/evp/scrypt.c',
'third_party/boringssl/crypto/evp/sign.c',
'third_party/boringssl/crypto/ex_data.c',
'third_party/boringssl/crypto/fipsmodule/bcm.c',
'third_party/boringssl/crypto/fipsmodule/is_fips.c',
'third_party/boringssl/crypto/hkdf/hkdf.c',
'third_party/boringssl/crypto/lhash/lhash.c',
'third_party/boringssl/crypto/mem.c',
'third_party/boringssl/crypto/obj/obj.c',
'third_party/boringssl/crypto/obj/obj_xref.c',
'third_party/boringssl/crypto/pem/pem_all.c',
'third_party/boringssl/crypto/pem/pem_info.c',
'third_party/boringssl/crypto/pem/pem_lib.c',
'third_party/boringssl/crypto/pem/pem_oth.c',
'third_party/boringssl/crypto/pem/pem_pk8.c',
'third_party/boringssl/crypto/pem/pem_pkey.c',
'third_party/boringssl/crypto/pem/pem_x509.c',
'third_party/boringssl/crypto/pem/pem_xaux.c',
'third_party/boringssl/crypto/pkcs7/pkcs7.c',
'third_party/boringssl/crypto/pkcs7/pkcs7_x509.c',
'third_party/boringssl/crypto/pkcs8/p5_pbev2.c',
'third_party/boringssl/crypto/pkcs8/pkcs8.c',
'third_party/boringssl/crypto/pkcs8/pkcs8_x509.c',
'third_party/boringssl/crypto/poly1305/poly1305.c',
'third_party/boringssl/crypto/poly1305/poly1305_arm.c',
'third_party/boringssl/crypto/poly1305/poly1305_vec.c',
'third_party/boringssl/crypto/pool/pool.c',
'third_party/boringssl/crypto/rand_extra/deterministic.c',
'third_party/boringssl/crypto/rand_extra/forkunsafe.c',
'third_party/boringssl/crypto/rand_extra/fuchsia.c',
'third_party/boringssl/crypto/rand_extra/rand_extra.c',
'third_party/boringssl/crypto/rand_extra/windows.c',
'third_party/boringssl/crypto/rc4/rc4.c',
'third_party/boringssl/crypto/refcount_c11.c',
'third_party/boringssl/crypto/refcount_lock.c',
'third_party/boringssl/crypto/rsa_extra/rsa_asn1.c',
'third_party/boringssl/crypto/stack/stack.c',
'third_party/boringssl/crypto/thread.c',
'third_party/boringssl/crypto/thread_none.c',
'third_party/boringssl/crypto/thread_pthread.c',
'third_party/boringssl/crypto/thread_win.c',
'third_party/boringssl/crypto/x509/a_digest.c',
'third_party/boringssl/crypto/x509/a_sign.c',
'third_party/boringssl/crypto/x509/a_strex.c',
'third_party/boringssl/crypto/x509/a_verify.c',
'third_party/boringssl/crypto/x509/algorithm.c',
'third_party/boringssl/crypto/x509/asn1_gen.c',
'third_party/boringssl/crypto/x509/by_dir.c',
'third_party/boringssl/crypto/x509/by_file.c',
'third_party/boringssl/crypto/x509/i2d_pr.c',
'third_party/boringssl/crypto/x509/rsa_pss.c',
'third_party/boringssl/crypto/x509/t_crl.c',
'third_party/boringssl/crypto/x509/t_req.c',
'third_party/boringssl/crypto/x509/t_x509.c',
'third_party/boringssl/crypto/x509/t_x509a.c',
'third_party/boringssl/crypto/x509/x509.c',
'third_party/boringssl/crypto/x509/x509_att.c',
'third_party/boringssl/crypto/x509/x509_cmp.c',
'third_party/boringssl/crypto/x509/x509_d2.c',
'third_party/boringssl/crypto/x509/x509_def.c',
'third_party/boringssl/crypto/x509/x509_ext.c',
'third_party/boringssl/crypto/x509/x509_lu.c',
'third_party/boringssl/crypto/x509/x509_obj.c',
'third_party/boringssl/crypto/x509/x509_r2x.c',
'third_party/boringssl/crypto/x509/x509_req.c',
'third_party/boringssl/crypto/x509/x509_set.c',
'third_party/boringssl/crypto/x509/x509_trs.c',
'third_party/boringssl/crypto/x509/x509_txt.c',
'third_party/boringssl/crypto/x509/x509_v3.c',
'third_party/boringssl/crypto/x509/x509_vfy.c',
'third_party/boringssl/crypto/x509/x509_vpm.c',
'third_party/boringssl/crypto/x509/x509cset.c',
'third_party/boringssl/crypto/x509/x509name.c',
'third_party/boringssl/crypto/x509/x509rset.c',
'third_party/boringssl/crypto/x509/x509spki.c',
'third_party/boringssl/crypto/x509/x_algor.c',
'third_party/boringssl/crypto/x509/x_all.c',
'third_party/boringssl/crypto/x509/x_attrib.c',
'third_party/boringssl/crypto/x509/x_crl.c',
'third_party/boringssl/crypto/x509/x_exten.c',
'third_party/boringssl/crypto/x509/x_info.c',
'third_party/boringssl/crypto/x509/x_name.c',
'third_party/boringssl/crypto/x509/x_pkey.c',
'third_party/boringssl/crypto/x509/x_pubkey.c',
'third_party/boringssl/crypto/x509/x_req.c',
'third_party/boringssl/crypto/x509/x_sig.c',
'third_party/boringssl/crypto/x509/x_spki.c',
'third_party/boringssl/crypto/x509/x_val.c',
'third_party/boringssl/crypto/x509/x_x509.c',
'third_party/boringssl/crypto/x509/x_x509a.c',
'third_party/boringssl/crypto/x509v3/pcy_cache.c',
'third_party/boringssl/crypto/x509v3/pcy_data.c',
'third_party/boringssl/crypto/x509v3/pcy_lib.c',
'third_party/boringssl/crypto/x509v3/pcy_map.c',
'third_party/boringssl/crypto/x509v3/pcy_node.c',
'third_party/boringssl/crypto/x509v3/pcy_tree.c',
'third_party/boringssl/crypto/x509v3/v3_akey.c',
'third_party/boringssl/crypto/x509v3/v3_akeya.c',
'third_party/boringssl/crypto/x509v3/v3_alt.c',
'third_party/boringssl/crypto/x509v3/v3_bcons.c',
'third_party/boringssl/crypto/x509v3/v3_bitst.c',
'third_party/boringssl/crypto/x509v3/v3_conf.c',
'third_party/boringssl/crypto/x509v3/v3_cpols.c',
'third_party/boringssl/crypto/x509v3/v3_crld.c',
'third_party/boringssl/crypto/x509v3/v3_enum.c',
'third_party/boringssl/crypto/x509v3/v3_extku.c',
'third_party/boringssl/crypto/x509v3/v3_genn.c',
'third_party/boringssl/crypto/x509v3/v3_ia5.c',
'third_party/boringssl/crypto/x509v3/v3_info.c',
'third_party/boringssl/crypto/x509v3/v3_int.c',
'third_party/boringssl/crypto/x509v3/v3_lib.c',
'third_party/boringssl/crypto/x509v3/v3_ncons.c',
'third_party/boringssl/crypto/x509v3/v3_pci.c',
'third_party/boringssl/crypto/x509v3/v3_pcia.c',
'third_party/boringssl/crypto/x509v3/v3_pcons.c',
'third_party/boringssl/crypto/x509v3/v3_pku.c',
'third_party/boringssl/crypto/x509v3/v3_pmaps.c',
'third_party/boringssl/crypto/x509v3/v3_prn.c',
'third_party/boringssl/crypto/x509v3/v3_purp.c',
'third_party/boringssl/crypto/x509v3/v3_skey.c',
'third_party/boringssl/crypto/x509v3/v3_sxnet.c',
'third_party/boringssl/crypto/x509v3/v3_utl.c',
'third_party/boringssl/ssl/bio_ssl.cc',
'third_party/boringssl/ssl/custom_extensions.cc',
'third_party/boringssl/ssl/d1_both.cc',
'third_party/boringssl/ssl/d1_lib.cc',
'third_party/boringssl/ssl/d1_pkt.cc',
'third_party/boringssl/ssl/d1_srtp.cc',
'third_party/boringssl/ssl/dtls_method.cc',
'third_party/boringssl/ssl/dtls_record.cc',
'third_party/boringssl/ssl/handshake.cc',
'third_party/boringssl/ssl/handshake_client.cc',
'third_party/boringssl/ssl/handshake_server.cc',
'third_party/boringssl/ssl/s3_both.cc',
'third_party/boringssl/ssl/s3_lib.cc',
'third_party/boringssl/ssl/s3_pkt.cc',
'third_party/boringssl/ssl/ssl_aead_ctx.cc',
'third_party/boringssl/ssl/ssl_asn1.cc',
'third_party/boringssl/ssl/ssl_buffer.cc',
'third_party/boringssl/ssl/ssl_cert.cc',
'third_party/boringssl/ssl/ssl_cipher.cc',
'third_party/boringssl/ssl/ssl_file.cc',
'third_party/boringssl/ssl/ssl_key_share.cc',
'third_party/boringssl/ssl/ssl_lib.cc',
'third_party/boringssl/ssl/ssl_privkey.cc',
'third_party/boringssl/ssl/ssl_session.cc',
'third_party/boringssl/ssl/ssl_stat.cc',
'third_party/boringssl/ssl/ssl_transcript.cc',
'third_party/boringssl/ssl/ssl_versions.cc',
'third_party/boringssl/ssl/ssl_x509.cc',
'third_party/boringssl/ssl/t1_enc.cc',
'third_party/boringssl/ssl/t1_lib.cc',
'third_party/boringssl/ssl/tls13_both.cc',
'third_party/boringssl/ssl/tls13_client.cc',
'third_party/boringssl/ssl/tls13_enc.cc',
'third_party/boringssl/ssl/tls13_server.cc',
'third_party/boringssl/ssl/tls_method.cc',
'third_party/boringssl/ssl/tls_record.cc',
'third_party/boringssl/third_party/fiat/curve25519.c',
],
},
{
'target_name': 'boringssl_test_util',
'type': 'static_library',
'dependencies': [
],
'sources': [
'third_party/boringssl/crypto/test/file_test.cc',
'third_party/boringssl/crypto/test/malloc.cc',
'third_party/boringssl/crypto/test/test_util.cc',
],
},
{
'target_name': 'boringssl_crypto_test_data_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'src/boringssl/crypto_test_data.cc',
],
},
{
'target_name': 'boringssl_asn1_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/asn1/asn1_test.cc',
],
},
{
'target_name': 'boringssl_base64_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/base64/base64_test.cc',
],
},
{
'target_name': 'boringssl_bio_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/bio/bio_test.cc',
],
},
{
'target_name': 'boringssl_buf_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/buf/buf_test.cc',
],
},
{
'target_name': 'boringssl_bytestring_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/bytestring/bytestring_test.cc',
],
},
{
'target_name': 'boringssl_chacha_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/chacha/chacha_test.cc',
],
},
{
'target_name': 'boringssl_aead_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/cipher_extra/aead_test.cc',
],
},
{
'target_name': 'boringssl_cipher_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/cipher_extra/cipher_test.cc',
],
},
{
'target_name': 'boringssl_cmac_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/cmac/cmac_test.cc',
],
},
{
'target_name': 'boringssl_compiler_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/compiler_test.cc',
],
},
{
'target_name': 'boringssl_constant_time_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/constant_time_test.cc',
],
},
{
'target_name': 'boringssl_ed25519_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/curve25519/ed25519_test.cc',
],
},
{
'target_name': 'boringssl_spake25519_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/curve25519/spake25519_test.cc',
],
},
{
'target_name': 'boringssl_x25519_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/curve25519/x25519_test.cc',
],
},
{
'target_name': 'boringssl_dh_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/dh/dh_test.cc',
],
},
{
'target_name': 'boringssl_digest_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/digest_extra/digest_test.cc',
],
},
{
'target_name': 'boringssl_dsa_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/dsa/dsa_test.cc',
],
},
{
'target_name': 'boringssl_ecdh_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/ecdh/ecdh_test.cc',
],
},
{
'target_name': 'boringssl_err_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/err/err_test.cc',
],
},
{
'target_name': 'boringssl_evp_extra_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/evp/evp_extra_test.cc',
],
},
{
'target_name': 'boringssl_evp_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/evp/evp_test.cc',
],
},
{
'target_name': 'boringssl_pbkdf_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/evp/pbkdf_test.cc',
],
},
{
'target_name': 'boringssl_scrypt_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/evp/scrypt_test.cc',
],
},
{
'target_name': 'boringssl_aes_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/fipsmodule/aes/aes_test.cc',
],
},
{
'target_name': 'boringssl_bn_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/fipsmodule/bn/bn_test.cc',
],
},
{
'target_name': 'boringssl_ec_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/fipsmodule/ec/ec_test.cc',
],
},
{
'target_name': 'boringssl_p256-x86_64_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/fipsmodule/ec/p256-x86_64_test.cc',
],
},
{
'target_name': 'boringssl_ecdsa_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/fipsmodule/ecdsa/ecdsa_test.cc',
],
},
{
'target_name': 'boringssl_gcm_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/fipsmodule/modes/gcm_test.cc',
],
},
{
'target_name': 'boringssl_ctrdrbg_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/fipsmodule/rand/ctrdrbg_test.cc',
],
},
{
'target_name': 'boringssl_hkdf_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/hkdf/hkdf_test.cc',
],
},
{
'target_name': 'boringssl_hmac_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/hmac_extra/hmac_test.cc',
],
},
{
'target_name': 'boringssl_lhash_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/lhash/lhash_test.cc',
],
},
{
'target_name': 'boringssl_obj_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/obj/obj_test.cc',
],
},
{
'target_name': 'boringssl_pkcs7_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/pkcs7/pkcs7_test.cc',
],
},
{
'target_name': 'boringssl_pkcs12_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/pkcs8/pkcs12_test.cc',
],
},
{
'target_name': 'boringssl_pkcs8_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/pkcs8/pkcs8_test.cc',
],
},
{
'target_name': 'boringssl_poly1305_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/poly1305/poly1305_test.cc',
],
},
{
'target_name': 'boringssl_pool_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/pool/pool_test.cc',
],
},
{
'target_name': 'boringssl_refcount_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/refcount_test.cc',
],
},
{
'target_name': 'boringssl_rsa_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/rsa_extra/rsa_test.cc',
],
},
{
'target_name': 'boringssl_file_test_gtest_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/test/file_test_gtest.cc',
],
},
{
'target_name': 'boringssl_gtest_main_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/test/gtest_main.cc',
],
},
{
'target_name': 'boringssl_thread_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/thread_test.cc',
],
},
{
'target_name': 'boringssl_x509_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/x509/x509_test.cc',
],
},
{
'target_name': 'boringssl_tab_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/x509v3/tab_test.cc',
],
},
{
'target_name': 'boringssl_v3name_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/crypto/x509v3/v3name_test.cc',
],
},
{
'target_name': 'boringssl_span_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/ssl/span_test.cc',
],
},
{
'target_name': 'boringssl_ssl_test_lib',
'type': 'static_library',
'dependencies': [
'boringssl_test_util',
'boringssl',
],
'sources': [
'third_party/boringssl/ssl/ssl_test.cc',
],
},
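    # benchmark: vendored Google Benchmark library used by the C++
    # microbenchmark targets.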
{
'target_name': 'benchmark',
'type': 'static_library',
'dependencies': [
],
'sources': [
'third_party/benchmark/src/benchmark.cc',
'third_party/benchmark/src/benchmark_register.cc',
'third_party/benchmark/src/colorprint.cc',
'third_party/benchmark/src/commandlineflags.cc',
'third_party/benchmark/src/complexity.cc',
'third_party/benchmark/src/console_reporter.cc',
'third_party/benchmark/src/counter.cc',
'third_party/benchmark/src/csv_reporter.cc',
'third_party/benchmark/src/json_reporter.cc',
'third_party/benchmark/src/reporter.cc',
'third_party/benchmark/src/sleep.cc',
'third_party/benchmark/src/string_util.cc',
'third_party/benchmark/src/sysinfo.cc',
'third_party/benchmark/src/timers.cc',
],
},
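    # z: vendored zlib compression library.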
{
'target_name': 'z',
'type': 'static_library',
'dependencies': [
],
'sources': [
'third_party/zlib/adler32.c',
'third_party/zlib/compress.c',
'third_party/zlib/crc32.c',
'third_party/zlib/deflate.c',
'third_party/zlib/gzclose.c',
'third_party/zlib/gzlib.c',
'third_party/zlib/gzread.c',
'third_party/zlib/gzwrite.c',
'third_party/zlib/infback.c',
'third_party/zlib/inffast.c',
'third_party/zlib/inflate.c',
'third_party/zlib/inftrees.c',
'third_party/zlib/trees.c',
'third_party/zlib/uncompr.c',
'third_party/zlib/zutil.c',
],
},
{
'target_name': 'bad_client_test',
'type': 'static_library',
'dependencies': [
'grpc_test_util_unsecure',
'grpc_unsecure',
'gpr_test_util',
'gpr',
],
'sources': [
'test/core/bad_client/bad_client.cc',
],
},
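    # end2end_tests: the core end-to-end test suite for the secure (TLS) build.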
{
'target_name': 'end2end_tests',
'type': 'static_library',
'dependencies': [
'grpc_test_util',
'grpc',
'gpr_test_util',
'gpr',
],
'sources': [
'test/core/end2end/end2end_tests.cc',
'test/core/end2end/end2end_test_utils.cc',
'test/core/end2end/tests/authority_not_supported.cc',
'test/core/end2end/tests/bad_hostname.cc',
'test/core/end2end/tests/bad_ping.cc',
'test/core/end2end/tests/binary_metadata.cc',
'test/core/end2end/tests/call_creds.cc',
'test/core/end2end/tests/cancel_after_accept.cc',
'test/core/end2end/tests/cancel_after_client_done.cc',
'test/core/end2end/tests/cancel_after_invoke.cc',
'test/core/end2end/tests/cancel_after_round_trip.cc',
'test/core/end2end/tests/cancel_before_invoke.cc',
'test/core/end2end/tests/cancel_in_a_vacuum.cc',
'test/core/end2end/tests/cancel_with_status.cc',
'test/core/end2end/tests/compressed_payload.cc',
'test/core/end2end/tests/connectivity.cc',
'test/core/end2end/tests/default_host.cc',
'test/core/end2end/tests/disappearing_server.cc',
'test/core/end2end/tests/empty_batch.cc',
'test/core/end2end/tests/filter_call_init_fails.cc',
'test/core/end2end/tests/filter_causes_close.cc',
'test/core/end2end/tests/filter_latency.cc',
'test/core/end2end/tests/filter_status_code.cc',
'test/core/end2end/tests/graceful_server_shutdown.cc',
'test/core/end2end/tests/high_initial_seqno.cc',
'test/core/end2end/tests/hpack_size.cc',
'test/core/end2end/tests/idempotent_request.cc',
'test/core/end2end/tests/invoke_large_request.cc',
'test/core/end2end/tests/keepalive_timeout.cc',
'test/core/end2end/tests/large_metadata.cc',
'test/core/end2end/tests/load_reporting_hook.cc',
'test/core/end2end/tests/max_concurrent_streams.cc',
'test/core/end2end/tests/max_connection_age.cc',
'test/core/end2end/tests/max_connection_idle.cc',
'test/core/end2end/tests/max_message_length.cc',
'test/core/end2end/tests/negative_deadline.cc',
'test/core/end2end/tests/network_status_change.cc',
'test/core/end2end/tests/no_logging.cc',
'test/core/end2end/tests/no_op.cc',
'test/core/end2end/tests/payload.cc',
'test/core/end2end/tests/ping.cc',
'test/core/end2end/tests/ping_pong_streaming.cc',
'test/core/end2end/tests/proxy_auth.cc',
'test/core/end2end/tests/registered_call.cc',
'test/core/end2end/tests/request_with_flags.cc',
'test/core/end2end/tests/request_with_payload.cc',
'test/core/end2end/tests/resource_quota_server.cc',
'test/core/end2end/tests/retry.cc',
'test/core/end2end/tests/retry_cancellation.cc',
'test/core/end2end/tests/retry_disabled.cc',
'test/core/end2end/tests/retry_exceeds_buffer_size_in_initial_batch.cc',
'test/core/end2end/tests/retry_exceeds_buffer_size_in_subsequent_batch.cc',
'test/core/end2end/tests/retry_non_retriable_status.cc',
'test/core/end2end/tests/retry_recv_initial_metadata.cc',
'test/core/end2end/tests/retry_recv_message.cc',
'test/core/end2end/tests/retry_server_pushback_delay.cc',
'test/core/end2end/tests/retry_server_pushback_disabled.cc',
'test/core/end2end/tests/retry_streaming.cc',
'test/core/end2end/tests/retry_streaming_after_commit.cc',
'test/core/end2end/tests/retry_streaming_succeeds_before_replay_finished.cc',
'test/core/end2end/tests/retry_throttled.cc',
'test/core/end2end/tests/retry_too_many_attempts.cc',
'test/core/end2end/tests/server_finishes_request.cc',
'test/core/end2end/tests/shutdown_finishes_calls.cc',
'test/core/end2end/tests/shutdown_finishes_tags.cc',
'test/core/end2end/tests/simple_cacheable_request.cc',
'test/core/end2end/tests/simple_delayed_request.cc',
'test/core/end2end/tests/simple_metadata.cc',
'test/core/end2end/tests/simple_request.cc',
'test/core/end2end/tests/stream_compression_compressed_payload.cc',
'test/core/end2end/tests/stream_compression_payload.cc',
'test/core/end2end/tests/stream_compression_ping_pong_streaming.cc',
'test/core/end2end/tests/streaming_error_response.cc',
'test/core/end2end/tests/trailing_metadata.cc',
'test/core/end2end/tests/workaround_cronet_compression.cc',
'test/core/end2end/tests/write_buffering.cc',
'test/core/end2end/tests/write_buffering_at_end.cc',
],
},
{
'target_name': 'end2end_nosec_tests',
'type': 'static_library',
'dependencies': [
'grpc_test_util_unsecure',
'grpc_unsecure',
'gpr_test_util',
'gpr',
],
'sources': [
'test/core/end2end/end2end_nosec_tests.cc',
'test/core/end2end/end2end_test_utils.cc',
'test/core/end2end/tests/authority_not_supported.cc',
'test/core/end2end/tests/bad_hostname.cc',
'test/core/end2end/tests/bad_ping.cc',
'test/core/end2end/tests/binary_metadata.cc',
'test/core/end2end/tests/cancel_after_accept.cc',
'test/core/end2end/tests/cancel_after_client_done.cc',
'test/core/end2end/tests/cancel_after_invoke.cc',
'test/core/end2end/tests/cancel_after_round_trip.cc',
'test/core/end2end/tests/cancel_before_invoke.cc',
'test/core/end2end/tests/cancel_in_a_vacuum.cc',
'test/core/end2end/tests/cancel_with_status.cc',
'test/core/end2end/tests/compressed_payload.cc',
'test/core/end2end/tests/connectivity.cc',
'test/core/end2end/tests/default_host.cc',
'test/core/end2end/tests/disappearing_server.cc',
'test/core/end2end/tests/empty_batch.cc',
'test/core/end2end/tests/filter_call_init_fails.cc',
'test/core/end2end/tests/filter_causes_close.cc',
'test/core/end2end/tests/filter_latency.cc',
'test/core/end2end/tests/filter_status_code.cc',
'test/core/end2end/tests/graceful_server_shutdown.cc',
'test/core/end2end/tests/high_initial_seqno.cc',
'test/core/end2end/tests/hpack_size.cc',
'test/core/end2end/tests/idempotent_request.cc',
'test/core/end2end/tests/invoke_large_request.cc',
'test/core/end2end/tests/keepalive_timeout.cc',
'test/core/end2end/tests/large_metadata.cc',
'test/core/end2end/tests/load_reporting_hook.cc',
'test/core/end2end/tests/max_concurrent_streams.cc',
'test/core/end2end/tests/max_connection_age.cc',
'test/core/end2end/tests/max_connection_idle.cc',
'test/core/end2end/tests/max_message_length.cc',
'test/core/end2end/tests/negative_deadline.cc',
'test/core/end2end/tests/network_status_change.cc',
'test/core/end2end/tests/no_logging.cc',
'test/core/end2end/tests/no_op.cc',
'test/core/end2end/tests/payload.cc',
'test/core/end2end/tests/ping.cc',
'test/core/end2end/tests/ping_pong_streaming.cc',
'test/core/end2end/tests/proxy_auth.cc',
'test/core/end2end/tests/registered_call.cc',
'test/core/end2end/tests/request_with_flags.cc',
'test/core/end2end/tests/request_with_payload.cc',
'test/core/end2end/tests/resource_quota_server.cc',
'test/core/end2end/tests/retry.cc',
'test/core/end2end/tests/retry_cancellation.cc',
'test/core/end2end/tests/retry_disabled.cc',
'test/core/end2end/tests/retry_exceeds_buffer_size_in_initial_batch.cc',
'test/core/end2end/tests/retry_exceeds_buffer_size_in_subsequent_batch.cc',
'test/core/end2end/tests/retry_non_retriable_status.cc',
'test/core/end2end/tests/retry_recv_initial_metadata.cc',
'test/core/end2end/tests/retry_recv_message.cc',
'test/core/end2end/tests/retry_server_pushback_delay.cc',
'test/core/end2end/tests/retry_server_pushback_disabled.cc',
'test/core/end2end/tests/retry_streaming.cc',
'test/core/end2end/tests/retry_streaming_after_commit.cc',
'test/core/end2end/tests/retry_streaming_succeeds_before_replay_finished.cc',
'test/core/end2end/tests/retry_throttled.cc',
'test/core/end2end/tests/retry_too_many_attempts.cc',
'test/core/end2end/tests/server_finishes_request.cc',
'test/core/end2end/tests/shutdown_finishes_calls.cc',
'test/core/end2end/tests/shutdown_finishes_tags.cc',
'test/core/end2end/tests/simple_cacheable_request.cc',
'test/core/end2end/tests/simple_delayed_request.cc',
'test/core/end2end/tests/simple_metadata.cc',
'test/core/end2end/tests/simple_request.cc',
'test/core/end2end/tests/stream_compression_compressed_payload.cc',
'test/core/end2end/tests/stream_compression_payload.cc',
'test/core/end2end/tests/stream_compression_ping_pong_streaming.cc',
'test/core/end2end/tests/streaming_error_response.cc',
'test/core/end2end/tests/trailing_metadata.cc',
'test/core/end2end/tests/workaround_cronet_compression.cc',
'test/core/end2end/tests/write_buffering.cc',
'test/core/end2end/tests/write_buffering_at_end.cc',
],
},
]
}
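A minimal sketch, not part of the generated file above, of how a hypothetical test executable could consume these static libraries using the same gyp conventions; the target name 'my_end2end_runner' and its source path are placeholders, not real gRPC targets.
{
  'target_name': 'my_end2end_runner',   # hypothetical target, illustration only
  'type': 'executable',
  'dependencies': [
    'end2end_tests',
    'grpc_test_util',
    'grpc',
    'gpr_test_util',
    'gpr',
  ],
  'sources': [
    'test/core/end2end/my_runner.cc',   # placeholder source file
  ],
},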
| 41.456912
| 101
| 0.643008
|
33b88947b8494ea712441f0df85fc6744e51e7ee
| 2,067
|
py
|
Python
|
enginessl/ml/data_handling/system.py
|
XXXalice/EngineSSL
|
582753932830cb7b714fde57490a72774af27cf4
|
[
"MIT"
] | 22
|
2018-10-20T19:39:58.000Z
|
2021-09-21T05:42:54.000Z
|
enginessl/ml/data_handling/system.py
|
AliClouds/EngineSSL
|
1b65b9c903d31c6ed2d96e906035adce22ce46ea
|
[
"MIT"
] | 73
|
2018-10-05T13:41:36.000Z
|
2020-10-04T20:27:20.000Z
|
enginessl/ml/data_handling/system.py
|
AliClouds/EngineSSL
|
1b65b9c903d31c6ed2d96e906035adce22ce46ea
|
[
"MIT"
] | 8
|
2018-10-23T12:31:30.000Z
|
2021-06-30T18:14:31.000Z
|
from . import kernel
class DataHandling(kernel.OpponentImage, kernel.Kernel):
def __init__(self, target_label, image_tanks=[]):
print('Data processing execution.')
# self.oppo = kernel.OpponentImage(image_tanks)
self.data_handling = kernel.Kernel(image_tanks)
# print('labels :{}'.format(self.oppo.labels))
def get_builtup_data(self,targets=[], not_targets=[], color_mode='grayscale'):
"""
画像のフルパスを受け取る
:return: 正規化されたデータ、ラベル
"""
train, valid = self.data_handling.split_train_test(targets=targets, not_targets=not_targets)
return self.data_handling.preprocess(train=train, valid=valid, color_mode=color_mode)
    def get_label_name(self):
        # NOTE: the original body was a bare attribute access with no effect;
        # returning the kernel instance is the minimal meaningful fix.
        return self.data_handling
def read_dirs(self, target_label):
"""
:return: target type=list
ターゲット画像のリストをくるんだリスト
target[0]で取り出す
0dimensionには基本的に1要素しか入らないがnot_targetの形状を考慮しそちらの形状で揃えた
not_target type=list
ターゲット以外の画像のリストをくるんだリスト
not_target[index]で各ノイズにアクセスできる
ノイズ一つ一つもリストになっていて、そのまま一つのラベルとして使用する
"""
target_dir, not_target_dir = self.get_img_dir(target_label=target_label, split_tag=True)
return self.read_datas_dir(target=target_dir, not_target=not_target_dir, target_label=target_label)
def oppo_kernel(self, target_dir, image_tanks):
print('Noise data processing execution.')
self.oppo = kernel.OpponentImage(target_dir=target_dir ,image_tanks=image_tanks, params=self.data_handling.params)
return self.oppo
    def make_noise(self, target):
        print('Making noise data.')
        labels = self.oppo.make_noise()
        labels.insert(0, target)  # list.insert() returns None, so return the list itself
        return labels
def get_builtup_data_include_noise(self):
"""
:return: tuple (self.x_train, self.x_test, self.y_train, self.y_test)
"""
return self.oppo.return_datafiles()
def test_show(self, img_bin):
self.oppo.test_show(img_bin)
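A minimal usage sketch, assuming the directory layout and return structure implied by kernel.Kernel; the label 'cat', the paths, and the variable names below are illustrative only.
# Hypothetical usage of DataHandling; all paths and labels are placeholders.
from enginessl.ml.data_handling.system import DataHandling

handler = DataHandling(target_label='cat', image_tanks=['/data/imgs/cat_001.png'])
# split_train_test + preprocess; the exact return structure comes from kernel.Kernel.
data = handler.get_builtup_data(targets=['/data/imgs/cats'],
                                not_targets=['/data/imgs/other'],
                                color_mode='grayscale')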
| 36.910714
| 122
| 0.657958
|
9f74dd6eed51a1d82303b9206a3e468f5dc16eb9
| 22,718
|
py
|
Python
|
tests/runtests.py
|
epicpewpew/gef
|
9798a2816d21da5a54b979e19c54446e08414ea9
|
[
"MIT"
] | 1
|
2019-10-18T02:43:54.000Z
|
2019-10-18T02:43:54.000Z
|
tests/runtests.py
|
wqsemc/gef
|
9798a2816d21da5a54b979e19c54446e08414ea9
|
[
"MIT"
] | null | null | null |
tests/runtests.py
|
wqsemc/gef
|
9798a2816d21da5a54b979e19c54446e08414ea9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
#
# Run tests by spawning a gdb instance for every command.
#
from __future__ import print_function
import os
import subprocess
import sys
import tempfile
import unittest
from helpers import gdb_run_cmd, \
gdb_run_silent_cmd, \
gdb_start_silent_cmd, \
gdb_start_silent_cmd_last_line, \
gdb_test_python_method
class GefUnitTestGeneric(unittest.TestCase):
"""Generic class for command testing, that defines all helpers"""
@staticmethod
def assertNoException(buf): #pylint: disable=invalid-name
if not ("Python Exception <" not in buf
and "Traceback" not in buf
and "'gdb.error'" not in buf
and "Exception raised" not in buf
and "failed to execute properly, reason:" not in buf):
raise AssertionError("Detected error in gdb output")
@staticmethod
def assertFailIfInactiveSession(buf): #pylint: disable=invalid-name
if "No debugging session active" not in buf:
raise AssertionError("No debugging session inactive warning")
class TestGefCommands(GefUnitTestGeneric): #pylint: disable=too-many-public-methods
"""Tests GEF GDB commands."""
def test_cmd_canary(self):
self.assertFailIfInactiveSession(gdb_run_cmd("canary"))
res = gdb_start_silent_cmd("canary", target="tests/binaries/canary.out")
self.assertNoException(res)
self.assertIn("Found AT_RANDOM at", res)
self.assertIn("The canary of process ", res)
return
def test_cmd_capstone_disassemble(self):
self.assertFailIfInactiveSession(gdb_run_cmd("capstone-disassemble"))
res = gdb_start_silent_cmd("capstone-disassemble")
self.assertNoException(res)
self.assertTrue(len(res.splitlines()) > 1)
return
def test_cmd_checksec(self):
cmd = "checksec"
res = gdb_run_cmd(cmd)
self.assertNoException(res)
target = "tests/binaries/checksec-no-canary.out"
res = gdb_run_cmd(cmd, target=target)
self.assertIn("Canary : ✘", res)
target = "tests/binaries/checksec-no-nx.out"
res = gdb_run_cmd(cmd, target=target)
self.assertIn("NX : ✘", res)
target = "tests/binaries/checksec-no-pie.out"
res = gdb_run_cmd(cmd, target=target)
self.assertIn("PIE : ✘", res)
return
def test_cmd_dereference(self):
self.assertFailIfInactiveSession(gdb_run_cmd("dereference"))
res = gdb_start_silent_cmd("dereference $sp")
self.assertNoException(res)
self.assertTrue(len(res.splitlines()) > 2)
self.assertIn("$rsp", res)
res = gdb_start_silent_cmd("dereference 0x0")
self.assertNoException(res)
self.assertIn("Unmapped address", res)
return
def test_cmd_edit_flags(self):
# force enable flag
res = gdb_start_silent_cmd_last_line("edit-flags +carry")
self.assertNoException(res)
self.assertIn("CARRY ", res)
# force disable flag
res = gdb_start_silent_cmd_last_line("edit-flags -carry")
self.assertNoException(res)
self.assertIn("carry ", res)
# toggle flag
res = gdb_start_silent_cmd_last_line("edit-flags ~carry")
self.assertNoException(res)
self.assertIn("CARRY ", res)
return
def test_cmd_elf_info(self):
res = gdb_run_cmd("elf-info")
self.assertNoException(res)
self.assertIn("7f 45 4c 46", res)
return
def test_cmd_entry_break(self):
res = gdb_run_cmd("entry-break")
self.assertNoException(res)
return
def test_cmd_format_string_helper(self):
cmd = "format-string-helper"
target = "tests/binaries/format-string-helper.out"
res = gdb_run_cmd(cmd,
after=["set args testtest",
"run",],
target=target)
self.assertNoException(res)
self.assertIn("Possible insecure format string:", res)
return
def test_cmd_functions(self):
cmd = "functions"
res = gdb_run_cmd(cmd)
self.assertNoException(res)
self.assertIn("$_heap", res)
return
def test_cmd_got(self):
cmd = "got"
target = "tests/binaries/format-string-helper.out"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd, target=target))
res = gdb_start_silent_cmd(cmd, target=target)
self.assertIn("printf", res)
self.assertIn("strcpy", res)
res = gdb_start_silent_cmd("got printf", target=target)
self.assertIn("printf", res)
self.assertNotIn("strcpy", res)
return
def test_cmd_heap_arenas(self):
cmd = "heap arenas"
target = "tests/binaries/heap.out"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd, target=target))
res = gdb_start_silent_cmd(cmd, target=target)
self.assertNoException(res)
self.assertIn("Arena (base=", res)
return
def test_cmd_heap_set_arena(self):
cmd = "heap set-arena main_arena"
target = "tests/binaries/heap.out"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd, target=target))
res = gdb_run_silent_cmd(cmd, target=target, after=["heap arenas",])
self.assertNoException(res)
self.assertIn("Arena (base=", res)
return
def test_cmd_heap_chunk(self):
cmd = "heap chunk p1"
target = "tests/binaries/heap.out"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd, target=target))
res = gdb_run_silent_cmd(cmd, target=target)
self.assertNoException(res)
self.assertIn("NON_MAIN_ARENA flag: ", res)
return
def test_cmd_heap_chunks(self):
cmd = "heap chunks"
target = "tests/binaries/heap.out"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd, target=target))
res = gdb_run_silent_cmd(cmd, target=target)
self.assertNoException(res)
self.assertIn("Chunk(addr=", res)
self.assertIn("top chunk", res)
return
def test_cmd_heap_bins_fast(self):
cmd = "heap bins fast"
target = "tests/binaries/heap-fastbins.out"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd, target=target))
res = gdb_run_silent_cmd(cmd, target=target)
self.assertNoException(res)
self.assertIn("Fastbins[idx=0, size=0x20]", res)
return
def test_cmd_heap_bins_non_main(self):
cmd = 'python gdb.execute("heap bins fast {}".format(get_main_arena().next))'
target = "tests/binaries/heap-non-main.out"
res = gdb_run_silent_cmd(cmd, target=target)
self.assertNoException(res)
self.assertIn("size=0x20, flags=PREV_INUSE|NON_MAIN_ARENA", res)
return
def test_cmd_heap_analysis(self):
cmd = "heap-analysis-helper"
target = "tests/binaries/heap-analysis.out"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd))
res = gdb_start_silent_cmd(cmd, after=["continue"], target=target)
self.assertNoException(res)
self.assertIn("Tracking", res)
self.assertIn("correctly setup", res)
self.assertIn("malloc(16)=", res)
self.assertIn("calloc(32)=", res)
addr = int(res.split("calloc(32)=")[1].split("\n")[0], 0)
self.assertRegex(res, r"realloc\(.+, 48")
self.assertIn("free({:#x}".format(addr), res)
return
def test_cmd_hexdump(self):
self.assertFailIfInactiveSession(gdb_run_cmd("hexdump $pc"))
res = gdb_start_silent_cmd("hexdump qword $pc")
self.assertNoException(res)
res = gdb_start_silent_cmd("hexdump dword $pc l1")
self.assertNoException(res)
res = gdb_start_silent_cmd("hexdump word $pc l5 reverse")
self.assertNoException(res)
res = gdb_start_silent_cmd("hexdump byte $sp l32")
self.assertNoException(res)
return
def test_cmd_keystone_assemble(self):
valid_cmds = [
"assemble nop; xor eax, eax; int 0x80",
"assemble -a arm -m arm add r0, r1, r2",
"assemble -a mips -m mips32 add $v0, 1",
"assemble -a sparc -m sparc32 set 0, %o0",
"assemble -a arm64 -m little_endian add x29, sp, 0; mov w0, 0; ret"
]
for cmd in valid_cmds:
res = gdb_start_silent_cmd(cmd)
self.assertNoException(res)
self.assertTrue(len(res.splitlines()) > 1)
return
def test_cmd_patch(self):
self.assertFailIfInactiveSession(gdb_run_cmd("patch"))
return
def test_cmd_patch_byte(self):
res = gdb_start_silent_cmd_last_line("patch byte $pc 0xcc", after=["display/8bx $pc",])
self.assertNoException(res)
self.assertRegex(res, r"0xcc\s*0x[^c]{2}")
return
def test_cmd_patch_word(self):
res = gdb_start_silent_cmd_last_line("patch word $pc 0xcccc", after=["display/8bx $pc",])
self.assertNoException(res)
self.assertRegex(res, r"(0xcc\s*)(\1)0x[^c]{2}")
return
def test_cmd_patch_dword(self):
res = gdb_start_silent_cmd_last_line("patch dword $pc 0xcccccccc", after=["display/8bx $pc",])
self.assertNoException(res)
self.assertRegex(res, r"(0xcc\s*)(\1\1\1)0x[^c]{2}")
return
def test_cmd_patch_qword(self):
res = gdb_start_silent_cmd_last_line("patch qword $pc 0xcccccccccccccccc", after=["display/8bx $pc",])
self.assertNoException(res)
self.assertRegex(res, r"(0xcc\s*)(\1\1\1\1\1\1)0xcc")
return
def test_cmd_patch_qword_symbol(self):
target = "tests/binaries/bss.out"
before = gdb_run_silent_cmd("deref $sp 1", target=target)
after = gdb_run_silent_cmd("patch qword $sp &msg", after=["deref $sp 1",], target=target)
self.assertNoException(before)
self.assertNoException(after)
self.assertNotIn("Hello world!", before)
self.assertIn("Hello world!", after)
return
def test_cmd_patch_string(self):
res = gdb_start_silent_cmd_last_line("patch string $sp \"Gef!Gef!Gef!Gef!\"", after=["grep Gef!Gef!Gef!Gef!",])
self.assertNoException(res)
self.assertIn("Gef!Gef!Gef!Gef!", res)
return
def test_cmd_pattern(self):
cmd = "pattern create 32"
target = "tests/binaries/pattern.out"
res = gdb_run_cmd(cmd, target=target)
self.assertNoException(res)
self.assertIn("aaaaaaaabaaaaaaacaaaaaaadaaaaaaa", res)
cmd = "pattern search $rbp"
target = "tests/binaries/pattern.out"
res = gdb_run_cmd(cmd, before=["set args aaaaaaaabaaaaaaacaaaaaaadaaaaaaa", "run"], target=target)
self.assertNoException(res)
self.assertIn("Found at offset", res)
return
def test_cmd_print_format(self):
self.assertFailIfInactiveSession(gdb_run_cmd("print-format"))
res = gdb_start_silent_cmd("print-format $rsp")
self.assertNoException(res)
self.assertTrue("buf = [" in res)
res = gdb_start_silent_cmd("print-format -f js $rsp")
self.assertNoException(res)
self.assertTrue("var buf = [" in res)
res = gdb_start_silent_cmd("print-format -f iDontExist $rsp")
self.assertNoException(res)
self.assertTrue("Language must be :" in res)
return
def test_cmd_process_status(self):
self.assertFailIfInactiveSession(gdb_run_cmd("process-status"))
res = gdb_start_silent_cmd("process-status")
self.assertNoException(res)
self.assertIn("Process Information", res)
self.assertIn("No child process", res)
self.assertIn("No open connections", res)
return
def test_cmd_registers(self):
self.assertFailIfInactiveSession(gdb_run_cmd("registers"))
res = gdb_start_silent_cmd("registers")
self.assertNoException(res)
self.assertIn("$rax", res)
self.assertIn("$eflags", res)
return
def test_cmd_reset_cache(self):
res = gdb_start_silent_cmd("reset-cache")
self.assertNoException(res)
return
def test_cmd_ropper(self):
cmd = "ropper"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd))
cmd = "ropper --search \"pop %; pop %; ret\""
res = gdb_run_silent_cmd(cmd)
self.assertNoException(res)
self.assertNotIn(": error:", res)
self.assertTrue(len(res.splitlines()) > 2)
return
def test_cmd_scan(self):
cmd = "scan libc stack"
target = "tests/binaries/checksec-no-pie.out"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd))
res = gdb_start_silent_cmd(cmd, target=target)
self.assertNoException(res)
self.assertIn(target, res)
target = "tests/binaries/default.out"
res = gdb_start_silent_cmd("scan binary libc", target=target)
self.assertNoException(res)
self.assertIn("__libc_start_main", res)
return
def test_cmd_search_pattern(self):
self.assertFailIfInactiveSession(gdb_run_cmd("grep /bin/sh"))
res = gdb_start_silent_cmd("grep /bin/sh")
self.assertNoException(res)
self.assertIn("0x", res)
return
def test_cmd_set_permission(self):
self.assertFailIfInactiveSession(gdb_run_cmd("set-permission"))
target = "tests/binaries/set-permission.out"
res = gdb_run_silent_cmd("set-permission 0x1337000", after=["vmmap",], target=target)
self.assertNoException(res)
line = [ l for l in res.splitlines() if "0x0000000001337000" in l ][0]
line = line.split()
self.assertEqual(line[0], "0x0000000001337000")
self.assertEqual(line[1], "0x0000000001338000")
self.assertEqual(line[2], "0x0000000000000000")
self.assertEqual(line[3], "rwx")
res = gdb_run_silent_cmd("set-permission 0x1338000", target=target)
self.assertNoException(res)
self.assertIn("Unmapped address", res)
return
def test_cmd_shellcode(self):
res = gdb_start_silent_cmd("shellcode")
self.assertNoException(res)
self.assertIn("Missing sub-command (search|get)", res)
return
def test_cmd_shellcode_search(self):
cmd = "shellcode search execve /bin/sh"
res = gdb_start_silent_cmd(cmd)
self.assertNoException(res)
self.assertIn("setuid(0) + execve(/bin/sh) 49 bytes", res)
return
def test_cmd_shellcode_get(self):
res = gdb_start_silent_cmd("shellcode get 77")
self.assertNoException(res)
self.assertIn("Shellcode written to ", res)
return
def test_cmd_stub(self):
cmd = "stub printf"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd))
res = gdb_start_silent_cmd(cmd)
self.assertNoException(res)
return
def test_cmd_theme(self):
res = gdb_run_cmd("theme")
self.assertNoException(res)
        possible_themes = [
            "context_title_line",
            "dereference_base_address",
            "context_title_message",
            "disable_color",
            "dereference_code",
            "dereference_string",
            "default_title_message",
            "default_title_line",
            "dereference_register_value",
            "xinfo_title_message",
        ]
for t in possible_themes:
# testing command viewing
res = gdb_run_cmd("theme {}".format(t))
self.assertNoException(res)
# testing command setting
v = "blue blah 10 -1 0xfff bold"
res = gdb_run_cmd("theme {} {}".format(t, v))
self.assertNoException(res)
return
def test_cmd_trace_run(self):
cmd = "trace-run"
res = gdb_run_cmd(cmd)
self.assertFailIfInactiveSession(res)
cmd = "trace-run $pc+1"
res = gdb_start_silent_cmd(cmd,
before=["gef config trace-run.tracefile_prefix /tmp/gef-trace-"])
self.assertNoException(res)
self.assertIn("Tracing from", res)
return
def test_cmd_unicorn_emulate(self):
cmd = "emu -n 1"
res = gdb_run_cmd(cmd)
self.assertFailIfInactiveSession(res)
res = gdb_start_silent_cmd(cmd)
self.assertNoException(res)
self.assertIn("Final registers", res)
return
def test_cmd_vmmap(self):
self.assertFailIfInactiveSession(gdb_run_cmd("vmmap"))
res = gdb_start_silent_cmd("vmmap")
self.assertNoException(res)
self.assertTrue(len(res.splitlines()) > 1)
res = gdb_start_silent_cmd("vmmap stack")
self.assertNoException(res)
self.assertTrue(len(res.splitlines()) > 1)
return
def test_cmd_xfiles(self):
self.assertFailIfInactiveSession(gdb_run_cmd("xfiles"))
res = gdb_start_silent_cmd("xfiles")
self.assertNoException(res)
self.assertTrue(len(res.splitlines()) >= 3)
return
def test_cmd_xinfo(self):
self.assertFailIfInactiveSession(gdb_run_cmd("xinfo $sp"))
res = gdb_start_silent_cmd("xinfo")
self.assertIn("At least one valid address must be specified", res)
res = gdb_start_silent_cmd("xinfo $sp")
self.assertNoException(res)
self.assertTrue(len(res.splitlines()) >= 7)
return
def test_cmd_xor_memory(self):
cmd = "xor-memory display $sp 0x10 0x41"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd))
res = gdb_start_silent_cmd(cmd)
self.assertNoException(res)
self.assertIn("Original block", res)
self.assertIn("XOR-ed block", res)
cmd = "xor-memory patch $sp 0x10 0x41"
res = gdb_start_silent_cmd(cmd)
self.assertNoException(res)
self.assertIn("Patching XOR-ing ", res)
return
def test_cmd_highlight(self):
cmds = [
"highlight add 41414141 yellow",
"highlight add 42424242 blue",
"highlight add 43434343 green",
"highlight add 44444444 pink",
'patch string $rsp "AAAABBBBCCCCDDDD"',
"hexdump qword $rsp 2"
]
res = gdb_start_silent_cmd('', after=cmds, strip_ansi=False)
self.assertNoException(res)
self.assertIn("\033[33m41414141\x1b[0m", res)
self.assertIn("\033[34m42424242\x1b[0m", res)
self.assertIn("\033[32m43434343\x1b[0m", res)
self.assertIn("\033[35m44444444\x1b[0m", res)
class TestGefFunctions(GefUnitTestGeneric):
"""Tests GEF internal functions."""
def test_func_get_memory_alignment(self):
res = gdb_test_python_method("get_memory_alignment(in_bits=False)")
self.assertIn(res.splitlines()[-1], ("4", "8"))
return
def test_func_set_arch(self):
res = gdb_test_python_method("current_arch.arch, current_arch.mode", before="set_arch()")
res = (res.splitlines()[-1])
self.assertIn("X86", res)
return
def test_func_which(self):
res = gdb_test_python_method("which('gdb')")
lines = res.splitlines()
self.assertIn("/gdb", lines[-1])
res = gdb_test_python_method("which('__IDontExist__')")
self.assertIn("Missing file `__IDontExist__`", res)
return
def test_func_get_filepath(self):
res = gdb_test_python_method("get_filepath()", target="/bin/ls")
self.assertNoException(res)
subprocess.call(["cp", "/bin/ls", "/tmp/foo bar"])
res = gdb_test_python_method("get_filepath()", target="/tmp/foo bar")
self.assertNoException(res)
subprocess.call(["rm", "/tmp/foo bar"])
return
def test_func_get_pid(self):
res = gdb_test_python_method("get_pid()", target="/bin/ls")
self.assertNoException(res)
self.assertTrue(int(res.splitlines()[-1]))
return
class TestGdbFunctions(GefUnitTestGeneric):
"""Tests gdb convenience functions added by GEF."""
def test_func_base(self):
cmd = "x/s $_base()"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd))
res = gdb_start_silent_cmd(cmd)
self.assertNoException(res)
self.assertIn("\\177ELF", res)
addr = res.splitlines()[-1].split()[0][:-1]
cmd = "x/s $_base(\"libc\")"
res = gdb_start_silent_cmd(cmd)
self.assertNoException(res)
self.assertIn("\\177ELF", res)
addr2 = res.splitlines()[-1].split()[0][:-1]
self.assertNotEqual(addr, addr2)
return
def test_func_heap(self):
cmd = "deref $_heap()"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd, target="tests/binaries/heap.out"))
res = gdb_run_silent_cmd(cmd, target="tests/binaries/heap.out")
self.assertNoException(res)
self.assertIn("+0x0048:", res)
cmd = "deref $_heap(0x10+0x10)"
res = gdb_run_silent_cmd(cmd, target="tests/binaries/heap.out")
self.assertNoException(res)
self.assertIn("+0x0048:", res)
return
def test_func_got(self):
cmd = "deref $_got()"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd, target="tests/binaries/heap.out"))
res = gdb_run_silent_cmd(cmd, target="tests/binaries/heap.out")
self.assertNoException(res)
self.assertIn("malloc", res)
return
def test_func_bss(self):
cmd = "deref $_bss()"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd, target="tests/binaries/bss.out"))
res = gdb_run_silent_cmd(cmd, target="tests/binaries/bss.out")
self.assertNoException(res)
self.assertIn("Hello world!", res)
return
def test_func_stack(self):
cmd = "deref $_stack()"
self.assertFailIfInactiveSession(gdb_run_cmd(cmd))
res = gdb_start_silent_cmd(cmd)
self.assertNoException(res)
self.assertRegex(res, r"\+0x0*20: *0x0000000000000000\n")
return
class TestGefMisc(GefUnitTestGeneric):
"""Tests external functionality."""
def test_update(self):
tempdir = tempfile.mkdtemp()
update_gef = os.path.join(tempdir, "gef.py")
subprocess.call(["cp", "/tmp/gef.py", update_gef])
status = subprocess.call(["python3", update_gef, "--update"])
self.assertEqual(status, 0)
def run_tests(name=None):
runner = unittest.TextTestRunner(verbosity=3)
unittest.main(testRunner=runner)
if __name__ == "__main__":
sys.exit(run_tests())
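The helpers above make new command tests mostly boilerplate. A sketch of the recurring three-step pattern, reusing the existing vmmap command so that no new test binary is assumed:
class TestMyCommand(GefUnitTestGeneric):
    """Illustrative only: the pattern used by most tests in this file."""

    def test_cmd_example(self):
        # 1. a session-dependent command must warn when no session is active
        self.assertFailIfInactiveSession(gdb_run_cmd("vmmap"))
        # 2. run it inside a live session, 3. assert no exception leaked out
        res = gdb_start_silent_cmd("vmmap")
        self.assertNoException(res)
        return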
| 35.946203
| 119
| 0.632318
|
f3e544b55c776928cf98c8014c8742af45791679
| 2,173
|
py
|
Python
|
OCR/OCRkaggle.py
|
Demonliquid/cars-python-cleaning
|
91c516a33c4522114dc024cfaf04f1c1d594f973
|
[
"MIT"
] | null | null | null |
OCR/OCRkaggle.py
|
Demonliquid/cars-python-cleaning
|
91c516a33c4522114dc024cfaf04f1c1d594f973
|
[
"MIT"
] | null | null | null |
OCR/OCRkaggle.py
|
Demonliquid/cars-python-cleaning
|
91c516a33c4522114dc024cfaf04f1c1d594f973
|
[
"MIT"
] | null | null | null |
#%%
# Generic Libraries
from PIL import Image
import os
import pandas as pd
import numpy as np
import re,string,unicodedata
#Tesseract Library
import pytesseract
#Warnings
import warnings
warnings.filterwarnings("ignore")
#Garbage Collection
import gc
#Gensim Library for Text Processing
import gensim.parsing.preprocessing as gsp
from gensim import utils
#TextBlob Library (Sentiment Analysis)
from textblob import TextBlob, Word
#Plotting Libraries
import matplotlib.pyplot as plt
import seaborn as sns
# %%
sample_images = r'C:\Users\Martin96\Desktop\Laburo\Registros'
ex_txt = []
processes = [
gsp.strip_tags,
gsp.strip_punctuation,
gsp.strip_multiple_whitespaces,
gsp.strip_numeric,
gsp.remove_stopwords,
gsp.strip_short,
gsp.stem_text
]
pytesseract.pytesseract.tesseract_cmd = r'C:\Program Files\Tesseract-OCR\tesseract.exe'
# %%
def traverse(directory):
path, dirs, files = next(os.walk(directory))
fol_nm = os.path.split(os.path.dirname(path))[-1]
print(f'Number of files found in "{fol_nm}" : ',len(files))
def TxtExtract(directory):
"""
This function will handle the core OCR processing of images.
"""
for subdir, dirs, files in os.walk(directory):
for file in files:
filepath = subdir + os.sep + file
text = pytesseract.image_to_string(Image.open(filepath), timeout=5)
if not text:
ex_txt.extend([[file, "blank"]])
else:
ex_txt.extend([[file, text]])
fol_nm = os.path.split(os.path.dirname(subdir))[-1]
print(f"Text Extracted from the files in '{fol_nm}' folder & saved to list..")
def proc_txt(txt):
text = txt.lower()
text = utils.to_unicode(text)
for p in processes:
text = p(text)
return text
# %%
traverse(sample_images)
# %%
TxtExtract(sample_images)
# %%
ext_df = pd.DataFrame(ex_txt,columns=['FileName','Text'])
# %%
ext_df['Text_Pr'] = ext_df['Text'].apply(proc_txt)
# %%
ext_df_txt = ext_df[ext_df['Text_Pr'] != 'blank']
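# %%
# A quick illustrative check of what proc_txt does to a raw OCR string; the
# sample sentence is arbitrary and the exact stems depend on gensim's stemmer.
sample = "The 3 quick brown foxes were running!"
print(proc_txt(sample))  # e.g. roughly: "quick brown fox run"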
| 21.73
| 82
| 0.638748
|
bf539c9dc207774e4a2468e090e425327ed44312
| 5,073
|
py
|
Python
|
queryResponder.py
|
azharhappy/Nancy-VA--MacOS
|
e2825f1e61e5f5c995de9cdda8e836d4a34cbb7d
|
[
"MIT"
] | 4
|
2017-01-03T07:26:10.000Z
|
2017-06-28T11:16:39.000Z
|
queryResponder.py
|
azhar22k/Nancy-VA--MacOS
|
e2825f1e61e5f5c995de9cdda8e836d4a34cbb7d
|
[
"MIT"
] | null | null | null |
queryResponder.py
|
azhar22k/Nancy-VA--MacOS
|
e2825f1e61e5f5c995de9cdda8e836d4a34cbb7d
|
[
"MIT"
] | 2
|
2018-03-27T13:00:58.000Z
|
2019-01-14T14:55:59.000Z
|
from notify import notify
from re import match,sub
from audioOutput import speak,speakWiki
from _thread import start_new_thread
from subprocess import getoutput
def search(Input):
# no data received
if Input == "":
notify(message="Sorry! Did you say something?")
return
    # Command for quitting
    if Input in ['quit', 'terminate']:
        speak("Bye")
        return
#Command to lock PC
if Input in ['lock','lock my mac','lock my pc']:
speak("See you soon")
getoutput("/System/Library/CoreServices/Menu\ Extras/User.menu/Contents/Resources/CGSession -suspend")
return
#Command to reboot
if Input in ['reboot', 'reboot my mac', 'reboot my pc']:
speak("See you soon")
getoutput("osascript -e 'tell application \"System Events\" to restart'")
return
# Command to shutdown
if Input in ['shutdown', 'shutdown my mac', 'shutdown my pc']:
speak("See you soon")
getoutput("osascript -e 'tell application \"System Events\" to shut down'")
return
    # Command for Self Introduction
if Input in ["who are you", "introduce yourself", "describe yourself"]:
answer = 'I am Nancy, your personal assistant.'
notify(title=Input, subtitle='I got this:', message=answer)
speak(answer)
return
# Command for Owner Information
if Input in ["who created you", "who is your master", "who is your owner"]:
answer = "Team Errorist created me, Although I'm open source!"
notify(title=Input, subtitle='I got this:', message=answer)
speak(answer)
return
# Command for opening maps
if match(r"^open maps.*$", Input):
from webHandler import openMaps
Input = Input.replace("open maps", " ")
openMaps(Input)
speak("Here It is...")
return
# Command for downloading lyrics
if match(r"^download lyrics.*$", Input):
from lyrics import lyrics_down
lyrics_down(Input)
return
#Command to open Applications
if match(r"^execute.*$",Input):
from fInderAndAppControl import openApp
Input=Input.replace("execute ","")
openApp(Input)
speak('There you go')
return
#Command to open a file
if match(r"^open file.*$",Input):
Input=Input.replace("open file ","")
from fInderAndAppControl import openFile
openFile(Input)
return
#Command to open a directory
if match(r"^open folder.*$",Input):
Input=Input.replace("open folder ","")
from fInderAndAppControl import openFolder
openFolder(Input)
return
#Command to play a song
if match(r"^play song.*$",Input):
Input=Input.replace("play song ","")
from fInderAndAppControl import openFile
openFile(Input,direc="Music")
return
#Command to play video
if match(r"^play video.*$",Input):
Input=Input.replace("play video ","")
from fInderAndAppControl import openFile
openFile(Input,direc="Movies")
return
    # Command for browsing a website
if match(r"^browse.*$", Input):
from webHandler import browseUrl
Input = Input.replace("browse ", " ")
browseUrl(Input)
return
# Command to throw a dice
if match(r"^throw a dice$", Input):
from randomStuff import dice
output = str(dice())
notify(message=output)
speak(output)
return
# Command to toss a coin
if match(r"^toss a coin$", Input):
from randomStuff import coin
output = coin()
notify(message=output)
speak(output)
return
# Command to download mp3 song
if match(r"^download (audio)|(song).*$", Input):
from mp3Download import page_link
Input = sub(r"download audio|song|mp3 ", '', Input)
#page_link(Input)
start_new_thread(page_link,(Input,))
return
# Command to download mp4 video
if match(r"^download video.*$", Input):
from mp4Download import youtube_link
Input = sub(r"download video ", '', Input)
#youtube_link(Input)
start_new_thread(youtube_link,(Input,))
return
# Command to read it aloud
if match(r"^(read out)|(speak out loud)$", Input):
from pyperclip import paste
speak(paste())
return
try:
from settings import client
print('Trying wolframalpha')
result = client.query(Input)
answer = next(result.results).text
notify(title=Input, subtitle='I got this:', message=answer)
speak(answer)
except:
try:
print('Trying wikipedia')
from wikipedia import summary
answer = summary(Input, sentences=1)
print(answer)
notify(title=Input, subtitle='I got this:', message=answer)
speakWiki(answer)
except Exception as err:
            notify(message='Oops! Nothing Found', extra='-timeout 1')
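A minimal sketch of driving the dispatcher directly; it assumes the notify and audio helper modules import cleanly on the host, and the command strings are examples only.
# Hypothetical interactive use of search(); inputs below are placeholders.
from queryResponder import search

search("toss a coin")            # notifies and speaks heads/tails
search("download song imagine")  # starts page_link() in a background thread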
| 31.70625
| 110
| 0.607727
|
22212ea305e9eca5da86e11db2f5ceb9fe034ab9
| 1,157
|
py
|
Python
|
python/src/nnabla/backward_function/sub2.py
|
daniel-falk/nnabla
|
3fe132ea52dc10521cc029a5d6ba8f565cf65ccf
|
[
"Apache-2.0"
] | 2,792
|
2017-06-26T13:05:44.000Z
|
2022-03-28T07:55:26.000Z
|
python/src/nnabla/backward_function/sub2.py
|
daniel-falk/nnabla
|
3fe132ea52dc10521cc029a5d6ba8f565cf65ccf
|
[
"Apache-2.0"
] | 138
|
2017-06-27T07:04:44.000Z
|
2022-02-28T01:37:15.000Z
|
python/src/nnabla/backward_function/sub2.py
|
daniel-falk/nnabla
|
3fe132ea52dc10521cc029a5d6ba8f565cf65ccf
|
[
"Apache-2.0"
] | 380
|
2017-06-26T13:23:52.000Z
|
2022-03-25T16:51:30.000Z
|
# Copyright 2019,2020,2021 Sony Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .utils import sum_for_arithmetics
def sub2_backward(inputs, inplace=False):
"""
Args:
        inputs (list of nn.Variable): Incoming grads/inputs to/of the forward function.
        inplace (bool): Unused here; kept for API compatibility.
Return:
list of Variable: Return the gradients wrt inputs of the corresponding function.
"""
dy = inputs[0]
x0 = inputs[1]
x1 = inputs[2]
dx0 = dy
dx1 = -dy
dx0 = sum_for_arithmetics(dx0, x0)
dx1 = sum_for_arithmetics(dx1, x1)
return dx0, dx1
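A numerical sanity check of the rule above: for y = x0 - x1 the gradients are dy and -dy, and sum_for_arithmetics reduces them over any broadcast axes. The numpy sketch below mirrors that reduction by hand.
import numpy as np

dy = np.ones((2, 3))                     # upstream gradient of y = x0 - x1
x0 = np.zeros((2, 3))
x1 = np.zeros((1, 3))                    # x1 was broadcast along axis 0
dx0 = dy                                 # d(x0 - x1)/dx0 = +1
dx1 = (-dy).sum(axis=0, keepdims=True)   # d(x0 - x1)/dx1 = -1, reduced to x1's shape
assert dx0.shape == x0.shape and dx1.shape == x1.shape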
| 32.138889
| 86
| 0.717373
|
cc6e336448cca85971924b5878d388e08a3f0a49
| 4,282
|
py
|
Python
|
aiohttp_mock/router.py
|
TestInABox/aiohttp-mock
|
4c4cc53c39ac65245fd1fbcc8bebf6fe3814b15d
|
[
"Apache-2.0"
] | 1
|
2015-10-05T19:40:51.000Z
|
2015-10-05T19:40:51.000Z
|
aiohttp_mock/router.py
|
BenjamenMeyer/aiohttp-mock
|
7d14892a6db4f48d377e22f01ed16c9364fe5e42
|
[
"Apache-2.0"
] | 9
|
2015-10-13T22:25:14.000Z
|
2015-10-16T04:46:00.000Z
|
aiohttp_mock/router.py
|
BenjamenMeyer/aiohttp-mock
|
7d14892a6db4f48d377e22f01ed16c9364fe5e42
|
[
"Apache-2.0"
] | 1
|
2016-05-06T15:39:40.000Z
|
2016-05-06T15:39:40.000Z
|
# Copyright 2015 by Benjamen R. Meyer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from aiohttp_mock.exceptions import *
from aiohttp.client_reqrep import ClientResponse
from aiohttp_mock.utils import cidict
class ConnectionRouterHandler(object):
"""Handler for a given URI
This class handles all the HTTP Verbs for a given URI.
"""
def __init__(self, uri):
self.uri = uri
self._method_handlers = {}
def add_method_handler(self, method, handler):
"""Add or update the Method handler
:param method: string - HTTP Verb
:param handler: ClientResponse object or callable
that will be used to respond to
the request
"""
self._method_handlers[method] = handler
def handle(self, method, request):
"""Handle a request
:param method: string - HTTP Verb
:param request: aiohttp.client_reqrep.ClientRequest
:returns: aiohttp.client_reqrep.ClientResponse
Note: Returns an HTTP 405 if the HTTP Verb is not
supported
"""
# If the method has a registered handler, then
# return it. Otherwise, create a 405 response
if method in self._method_handlers:
handler = self._method_handlers[method]
# Callbacks must be callables
            if hasattr(handler, '__call__'):
                return handler(request)
else:
return handler
else:
response = ClientResponse(method, self.uri, host='aiohttp_mock')
response.status = 405
response.reason = 'Method Not Supported'
response._should_close = False
response._headers = cidict({
'x-agent': 'aiohttp-mock',
'content-length': 0
})
return response
class ConnectionRouter(object):
def __init__(self):
self._routes = {}
def reset(self):
"""Reset all the routes
"""
self._routes = {}
def add_route(self, uri):
"""Add a route to be managed
:param uri: string - URI to be handled
"""
if uri not in self._routes:
self._routes[uri] = ConnectionRouterHandler(uri)
def get_route(self, uri):
"""Access the handler for a URI
:param uri: string - URI of the request
:returns: ConnectionRouterHandler instance managing the route
:raises: RouteNotHandled if the route is not handled
"""
if uri in self._routes:
return self._routes[uri]
else:
raise RouteNotHandled('{0} not handled'.format(uri))
def add_route_handler(self, uri, method, handler):
"""Add an HTTP Verb handler to the URI
:param uri: string - URI that the handler is for
:param method: string - HTTP Verb the handler is for
        :param handler: ClientResponse or callable that will handle the request
"""
try:
router = self.get_route(uri)
except RouteNotHandled:
self.add_route(uri)
router = self.get_route(uri)
router.add_method_handler(method, handler)
def handle(self, method, uri, request):
"""Handle a request and create a response
:param method: string - HTTP Method the request is calling
:param uri: string - URI the request is for
        :param request: aiohttp.client_reqrep.ClientRequest instance
for the request
:returns: aiohttp.client_reqrep.ClientResponse instance
:raises: RouteNotHandled if the route is not handled
"""
router = self.get_route(uri)
return router.handle(method, request)
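A usage sketch; the URI and handler below are placeholders, and the canned ClientResponse is built exactly the way the 405 path above builds one.
router = ConnectionRouter()

def ping_handler(request):
    # Illustrative callable handler returning a canned 200 response.
    response = ClientResponse('GET', 'http://example.test/ping', host='aiohttp_mock')
    response.status = 200
    response.reason = 'OK'
    response._should_close = False
    response._headers = cidict({'x-agent': 'aiohttp-mock', 'content-length': 0})
    return response

router.add_route_handler('http://example.test/ping', 'GET', ping_handler)
resp = router.handle('GET', 'http://example.test/ping', request=None)  # -> 200 OK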
| 31.955224
| 78
| 0.625175
|
57e92917747ace6590e637d453309898de5422c9
| 270
|
py
|
Python
|
examples/verify_bitmex_timestamps.py
|
mstumberger/Quantdom
|
2649aba90c741618a75900691480ddb720c461f4
|
[
"Apache-2.0"
] | 1
|
2018-10-04T17:10:40.000Z
|
2018-10-04T17:10:40.000Z
|
examples/verify_bitmex_timestamps.py
|
mstumberger/Quantdom
|
2649aba90c741618a75900691480ddb720c461f4
|
[
"Apache-2.0"
] | null | null | null |
examples/verify_bitmex_timestamps.py
|
mstumberger/Quantdom
|
2649aba90c741618a75900691480ddb720c461f4
|
[
"Apache-2.0"
] | null | null | null |
import json
import pandas as pd
from datetime import datetime
df = pd.DataFrame(json.loads(open('prices.json').read()), columns=['o', 'c', 'v'])
print(df.head(20))
# print(df.resample('10min').T)
# for item in data.keys():
# print(datetime.fromtimestamp(int(item)))
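A sketch of the commented-out resampling idea; it assumes prices.json is an object keyed by epoch-second timestamps (one plausible reading of the commented loop above), so the index must first become a DatetimeIndex.
# Hypothetical reconstruction; the orientation and key format are assumptions.
data = json.loads(open('prices.json').read())
ohlc = pd.DataFrame.from_dict(data, orient='index', columns=['o', 'c', 'v'])
ohlc.index = pd.to_datetime(ohlc.index.astype('int64'), unit='s')
print(ohlc.resample('10min').mean().head())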
| 30
| 82
| 0.688889
|
91d57f7bce200bda8f86ddb726093e7a4c71fcfa
| 3,195
|
py
|
Python
|
framework/operation_assertions/operation_assertions_common.py
|
Creatordev/creator-system-test-framework
|
e92ea7315ccf1ee2fefc205fb0f4f95eb48b105f
|
[
"BSD-3-Clause"
] | 2
|
2016-08-02T22:11:47.000Z
|
2016-08-02T22:13:13.000Z
|
framework/operation_assertions/operation_assertions_common.py
|
Creatordev/creator-system-test-framework
|
e92ea7315ccf1ee2fefc205fb0f4f95eb48b105f
|
[
"BSD-3-Clause"
] | null | null | null |
framework/operation_assertions/operation_assertions_common.py
|
Creatordev/creator-system-test-framework
|
e92ea7315ccf1ee2fefc205fb0f4f95eb48b105f
|
[
"BSD-3-Clause"
] | null | null | null |
#/************************************************************************************************************************
# Copyright (c) 2016, Imagination Technologies Limited and/or its affiliated group companies.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the
# following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#************************************************************************************************************************/
from framework.awa_enums import AwaError
from framework.awa_enums import AwaLWM2MError
def _CheckExceptionHasError(testCase, exceptionContext, error, pathResultError, pathResultLWM2MError):
testCase.assertGreater(len(exceptionContext.args), 3)
testCase.assertEqual(error, exceptionContext.args[1], str(AwaError(exceptionContext.args[1])) + " != " + str(error) +" " + str(exceptionContext.args))
assertionMessage = str(AwaError(exceptionContext.args[2])) + " != " + str(pathResultError) +" " + str(exceptionContext.args)
testCase.assertEqual(pathResultError, exceptionContext.args[2], assertionMessage)
assertionMessage = str(AwaLWM2MError(exceptionContext.args[3])) + " != " + str(pathResultLWM2MError) +" " + str(exceptionContext.args)
testCase.assertEqual(pathResultLWM2MError, exceptionContext.args[3], assertionMessage)
def CheckForException(testCase, successFunction, assertion, exception, awaError, pathResultError=AwaError.Unspecified, pathResultLWM2MError=AwaLWM2MError.Unspecified):
try:
successFunction(testCase, assertion)
except exception as exceptionContext:
        if awaError is not None:
_CheckExceptionHasError(testCase, exceptionContext, awaError, pathResultError, pathResultLWM2MError)
else:
testCase.fail("Exception " + str(exception) +" was not thrown")
| 72.613636
| 167
| 0.715493
|
21f9e1c171fc8d95646881d92733ebebec2ab675
| 15,627
|
py
|
Python
|
leolo/tests/tests.py
|
Menda/Leolo
|
1fe4d26d4b3717fe5fb1b8c6e9eb6ca11ad44c55
|
[
"Apache-2.0"
] | 1
|
2020-02-13T20:55:39.000Z
|
2020-02-13T20:55:39.000Z
|
leolo/tests/tests.py
|
Menda/Leolo
|
1fe4d26d4b3717fe5fb1b8c6e9eb6ca11ad44c55
|
[
"Apache-2.0"
] | null | null | null |
leolo/tests/tests.py
|
Menda/Leolo
|
1fe4d26d4b3717fe5fb1b8c6e9eb6ca11ad44c55
|
[
"Apache-2.0"
] | null | null | null |
import unittest
import os
from sets import Set
import urllib
from leolo.manager import Manager
import settings
valid_urls = (
# url, name, last-modified, last-entrylink
("http://leolo.s3.amazonaws.com/rss_django.xml",
"The Django weblog",
"Fri, 17 Sep 2010 16:29:04 GMT",
"http://www.djangoproject.com/weblog/2010/sep/10/123/",
),
("http://leolo.s3.amazonaws.com/rss_fsf.xml",
"Recent blog posts",
"Fri, 17 Sep 2010 16:31:11 GMT",
"http://www.fsf.org/blogs/community/whos-using-free-software",
),
("http://leolo.s3.amazonaws.com/rss_mozilla.xml",
"European Mozilla Community Blog",
"Fri, 17 Sep 2010 16:35:16 GMT",
"http://blogs.mozilla-europe.org/?post/2010/09/03/Meet-Kerim-Kalamujic%2C-Bosnian-Contributor%21",
),
("http://leolo.s3.amazonaws.com/rss_python.xml",
"Python News",
"Fri, 17 Sep 2010 16:29:04 GMT",
"http://www.python.org/news/index.html#Mon06Sep20108300200",
),
)
invalid_urls = (
# url, name
("http://leolo.s3.amazonaws.com/no_rss0.xml",
None),
("http://leolo.s3.amazonaws.com/no_rss1.xml",
None),
)
illformed = (
"http://leolo.s3.amazonaws.com/atom_invalid01.xml",
"http://leolo.s3.amazonaws.com/atom_invalid02.xml",
"http://leolo.s3.amazonaws.com/atom_invalid03.xml",
"http://leolo.s3.amazonaws.com/rss_invalid01.xml",
"http://leolo.s3.amazonaws.com/rss_invalid02.xml",
"http://leolo.s3.amazonaws.com/rss_invalid03.xml",
"http://leolo.s3.amazonaws.com/rss_invalid04.xml",
)
# sed 's/http/\^http/g' rss_mp3blog03.html | tr -s "^" "\n" | grep http | sed 's/[\ |\\\|\"].*//g' | sed "s/['].*//g" | sort | uniq | grep mp3
mp3blogs = (
("http://leolo.s3.amazonaws.com/rss_mp3blog01.xml",
"http://downloads.pitchforkmedia.com/ifyouwantit.mp3",
"http://dl.dropbox.com/u/12242045/01%20the%20lonely%20smurfer.mp3",
"http://dl.dropbox.com/u/12242045/03%20Jesus%20words%20on%20a%20surimi%20stick.mp3",
"http://stadiumsandshrines.com/wordpress/wp-content/uploads/2010/10/SAVED.mp3",
"http://stadiumsandshrines.com/wordpress/wp-content/uploads/2010/09/Luftwaffe-Quiet-Summer-02-Old-Friends.mp3",
"http://www.box.net/shared/static/z0hbbbk1sg.mp3",
"http://dl.dropbox.com/u/12242045/Husband%20-%20Feelings.mp3",
"http://downloads.pitchforkmedia.com/My%20Dry%20Wet%20Mess%20-%20Etcetera.mp3",
"http://dl.dropbox.com/u/12242045/PEPEPIANO%20-%20bruce%20springsteen.mp3",
"http://mp3.imposemagazine.com/greatest-hits-uptown-girl.mp3",
"http://alteredzones.com/dl/audio/548/gauntlet-hair-out-dont.mp3",
"http://dl.dropbox.com/u/12242045/02%20My%20Love.mp3",
"http://dl.dropbox.com/u/12242045/04%20Colours.mp3",
"http://www.20jazzfunkgreats.co.uk/wordpress/wp-content/uploads/2010/10/DMX_Krew-Mr_Blue.mp3",
"http://www.mediafire.com/file/83ncudhwson69jd/Cursed%20Kids%20-%20Eugene.mp3",
"http://fakepennycomics.com/blog/NN_Beeswaxunouomeduderemix.mp3",
"http://dl.dropbox.com/u/12242045/Rovers.mp3",
"http://www.listenbeforeyoubuy.net/wp-content/uploads/2010/07/Night%20Manager/Night%20Manager%20-%20Blackout%20Sex.mp3",
"http://dl.dropbox.com/u/12242045/herrek%20-%20exile.mp3",
"http://www.deliciouscopitone.com/mp3/lovesong.mp3",
"http://www.deliciouscopitone.com/mp3/zordon.mp3",
"http://home.comcast.net/~doodlebug58/LOONGOON.mp3",
),
("http://leolo.s3.amazonaws.com/rss_mp3blog02.xml",
"http://www.chromewaves.net/mp3/radio/Superchunk-ScaryMonsters(andSuperCreeps).mp3",
"http://www.killbeatmusic.com/modernsuperstitions/modern_superstitions-visions_of_you.mp3",
"http://www.chromewaves.net/mp3/Darcys-HouseBuiltAroundYourVoice.mp3",
"http://www.killbeatmusic.com/rebekahhiggs/rebekah_higgs-little_voice.mp3",
"http://www.styrofoamones.com/BlueLines.mp3",
"http://230publicity.com/audio/01TySegall.mp3",
"http://media.dallasobserver.com/5430274.0.mp3",
"http://blog.limewire.com/wp-content/uploads/2010/10/Liz-Phair-FUNSTYLE-05-My-My.mp3",
"http://downloads.pitchforkmedia.com/Diamond%20Rings%20-%20Something%20Else.mp3",
"http://www.beggarsgroupusa.com/mp3/BlondeRedhead_HereSometimes.mp3",
"http://promo.beggars.com/us/mp3/blonderedhead_23.mp3",
"http://thefader.cachefly.net/blonde-redhead-not-getting-there.mp3",
"http://cache.epitonic.com/files/reg/songs/mp3/Blonde_Redhead-Misery_Is_A_Butterfly.mp3",
"http://www.epitonic.com/files/reg/songs/mp3/Blonde_Redhead-In_Particular.mp3",
"http://www.tgrec.com/media/94.mp3",
"http://downloads.pitchforkmedia.com/The%20Smith%20Westerns%20-%20Imagine,%20Pt.%203.mp3",
"http://pitchperfectpr.com/mp3/Give%20Me%20More.mp3",
"http://www.epitonic.com/files/reg/songs/mp3/Blonde_Redhead-Missile.mp3",
"http://www.tgrec.com/media/78.mp3",
"http://audio.sxsw.com/2010/mp3/Diamond_Rings-All_Yr_Songs.mp3",
"http://media.nme.com.edgesuite.net/audio/2010/march/waitandsee.mp3",
"http://www.matadorrecords.com/mpeg/fucked_up/no_epiphany.mp3",
"http://www.chromewaves.net/mp3/Sadies-AnotherYearAgain.mp3",
"http://www.outofthisspark.com/forestcitylovers_carriage_LightYouUp.mp3",
"http://www.scjag.com/mp3/sc/nursery.mp3",
"http://www.chromewaves.net/mp3/radio/SkyLarkin-Barracuda.mp3",
"http://www.frankichan.com/mattandkim/yeayeah.mp3",
"http://www.matadorrecords.com/mpeg/mission_of_burma/mission_of_burma_1_2_3_partyy.mp3",
"http://www.matadorrecords.com/mpeg/mission_of_burma/mission_of_burma_max_ernst.mp3",
"http://www.scjag.com/mp3/do/adamsturtle.mp3",
"http://www.scjag.com/mp3/do/lifeofbirds.mp3",
"http://www.scjag.com/mp3/do/cockles.mp3",
),
("http://leolo.s3.amazonaws.com/rss_mp3blog03.xml",
"http://therslweblog.readyhosting.com/Remy%20Zero%2001%20Intro%20-%20Houston.mp3",
"http://therslweblog.readyhosting.com/Remy%20Zero%2002%20Belong%20-%20Houston.mp3",
"http://therslweblog.readyhosting.com/Remy%20Zero%2003%20Apology%20-%20Houston.mp3",
"http://therslweblog.readyhosting.com/Remy%20Zero%2004%20Smile%20-%20Houston.mp3",
"http://therslweblog.readyhosting.com/Remy%20Zero%2005%20Life%20in%20Rain%20-%20Houston.mp3",
"http://therslweblog.readyhosting.com/Remy%20Zero%2006%20Bitter%20-%20Houston.mp3",
"http://therslweblog.readyhosting.com/Remy%20Zero%2007%20Twister%20-%20Houston.mp3",
"http://therslweblog.readyhosting.com/Remy%20Zero%2008%20Save%20Me%20-%20Houston.mp3",
),
("http://leolo.s3.amazonaws.com/rss_mp3blog04.xml",
"http://werunfl.com/Penned/MP3/Ben/A-Trak%20-%20Trizzy%20Turnt%20Up%20%28Dirty%29.mp3",
"http://werunfl.com/Penned/MP3/Ben/A-Trak%20-%20Trizzy%20Turnt%20Up%20(Dirty).mp3",
"http://werunfl.com/Penned/MP3/Ben/Cold%20Blank%20-%20The%20Flying%20Cat%20%28the%20%20Bulgarian%20Remix%29.mp3",
"http://werunfl.com/Penned/MP3/Ben/Cold%20Blank%20-%20The%20Flying%20Cat%20(the%20%20Bulgarian%20Remix).mp3",
"http://werunfl.com/Penned/MP3/Ben/Douster%20n%20Savage%20Skulls%20-%20Bad%20Gal.mp3",
"http://werunfl.com/Penned/MP3/CJ/Body%20Jack%20%28Original%20Mix%29.mp3",
"http://werunfl.com/Penned/MP3/CJ/Body%20Jack%20(Original%20Mix).mp3",
"http://werunfl.com/Penned/MP3/CJ/Fuck%20You%20%28Le%20Castle%20Vania%20Remix%29.mp3",
"http://werunfl.com/Penned/MP3/CJ/Fuck%20You%20(Le%20Castle%20Vania%20Remix).mp3",
"http://werunfl.com/Penned/MP3/CJ/Hello%20%28Original%20Mix%29.mp3",
"http://werunfl.com/Penned/MP3/CJ/Hello%20(Original%20Mix).mp3",
"http://werunfl.com/Penned/MP3/DC/Aerotronic%20-%20Sex%20&%20Cigarettes%28Hostage%20Remix%29.mp3",
"http://werunfl.com/Penned/MP3/DC/Blastaguyz-SkinnedBitch.mp3",
"http://werunfl.com/Penned/MP3/Evan/04-armin_van_buuren_vs_sophie_ellis-bextor-not_giving_up_on_love_(dash_berlin_4am_mix).mp3",
"http://werunfl.com/Penned/MP3/Evan/Alex%20Armes%20-%20No%20Reasons_Christian%20Vila%20%20Jordi%20Sanchez%20Mix%20%20Laidback%20Luke%20Edit.mp3",
"http://werunfl.com/Penned/MP3/Evan/Boy%208-Bit%20-%2001.%20Suspense%20Is%20Killing%20Me%20(Philipe%20De%20Boyar%20remix).mp3",
"http://werunfl.com/Penned/MP3/Evan/Get%20Busy%20(Lee%20Mortimer%20Remix).mp3",
"http://werunfl.com/Penned/MP3/Justin/Sweet%20Disposition%20%28Knowlton%20Walsh%20remix%29.mp3",
"http://werunfl.com/Penned/MP3/Justin/Uffie%20-%20MCs%20Can%20Kiss%20%28Far%20Too%20Loud%20Refix%29.mp3",
"http://werunfl.com/Penned/MP3/Manley/Hijack%20vs%20Hatiras%20Possessed%20By%20A%20Bitch%20(JLR%20Mashapella%20Mashup).mp3",
"http://werunfl.com/Penned/MP3/Manley/Like%20a%20G6.mp3",
"http://werunfl.com/Penned/MP3/Manley/Mason%20feat.%20DMC%20Sam%20Sparro%20-%20Corrected%20(Riva%20Starr%20Vocal%20Mix).mp3",
"http://werunfl.com/Penned/MP3/Manley/OneMonster_Morningstar_Mix.mp3",
"http://werunfl.com/Penned/MP3/Manley/ZOMBIES%20(Kiddie%20Smile%20Remix).mp3",
"http://werunfl.com/Penned/MP3/Mark/The%20Lempo%20and%20Japwow%20Project%20-%20Pump%20Pump%20%28Original%20Mix%29.mp3",
"http://werunfl.com/Penned/MP3/Nicolas/Clap%20Your%20Hands%20(Diplo%20Remix).mp3",
"http://werunfl.com/Penned/MP3/Nicolas/Pet%20Monster.mp3",
"http://werunfl.com/Penned/MP3/Ben/Haddaway%20-%20What%20Is%20Love%20(DJ%20Ethos%20Remix).mp3",
"http://werunfl.com/Penned/MP3/Ben/Haddaway%20-%20What%20Is%20Love%20(DJ%20Ethos%20Remix).mp3",
"http://werunfl.com/Penned/MP3/Nicolas/Love%20Party.mp3",
"http://werunfl.com/Penned/MP3/Nicolas/Feels%20So%20Real%20(Douster%20and%20Savage%20Skulls%20Jersey%20Shore%20Remix).mp3",
"http://werunfl.com/Penned/MP3/DC/Matta%20-Vortex.mp3",
"http://werunfl.com/Penned/MP3/DC/Aerotronic%20-%20Sex%20&%20Cigarettes%28Hostage%20Remix%29.mp3",
"http://werunfl.com/Penned/MP3/DC/Matta%20-Vortex.mp3",
"http://werunfl.com/Penned/MP3/Mark/The%20Lempo%20and%20Japwow%20Project%20-%20Pump%20Pump%20(Original%20Mix).mp3",
),
("http://leolo.s3.amazonaws.com/rss_mp3blog05.xml",),
)
class Test(unittest.TestCase):
"""
Battery tests for Leolo.
"""
def test00_add_site_as_read(self):
"""
Add a valid site and mark the last link as read.
"""
m = Manager()
valid = valid_urls[0]
m.del_site(valid[0])
name = m.add_site(valid[0], True)
self.assertEqual(name, valid[1])
m.update_sites()
sites = m.get_sites()
for site in sites:
if site.feed.url == valid[0]:
self.assertEqual(site.feed.last_entrylink, valid[3])
self.assertEqual(site.feed.updated, False)
break
m.del_site(valid[0])
def test01_add_sites(self):
"""
Add all valid sites.
"""
m = Manager()
for valid in valid_urls:
m.del_site(valid[0])
name = m.add_site(valid[0])
self.assertEqual(name, valid[1])
def test02_invalid_sites(self):
"""
Try to add all invalid sites.
"""
m = Manager()
for invalid in invalid_urls:
name = m.add_site(invalid[0])
self.assertEqual(name, invalid[1])
def test03_illformed_feeds(self):
"""
Try to add all illformed feeds.
"""
m = Manager()
for url in illformed:
self.assertEqual(m.add_site(url, True), None)
def test04_update(self):
"""
Updates all feeds.
"""
m = Manager()
m.update_sites()
sites = m.get_sites()
for site in sites:
for valid in valid_urls:
if site.feed.url == valid[0]:
self.assertEqual(site.title, valid[1])
self.assertEqual(site.feed.last_modified, valid[2])
self.assertEqual(site.feed.last_entrylink, valid[3])
self.assertEqual(site.feed.updated, True)
for valid in valid_urls:
m.del_site(valid[0])
def test05_update(self):
"""
Updates first 2 feeds.
"""
m = Manager()
m.update_sites(0, 2)
sites = m.get_sites()
count = 0
for site in sites:
for valid in valid_urls:
if site.feed.url == valid[0]:
print count
if count < 2:
self.assertEqual(site.feed.updated, True)
else:
self.assertEqual(site.feed.updated, False)
count += 1
for valid in valid_urls:
m.del_site(valid[0])
def test06_update(self):
"""
        Doesn't update because the Last-Modified header is the same as last time.
"""
m = Manager()
m.update_sites(0)
sites = m.get_sites()
for site in sites:
for valid in valid_urls:
if site.feed.url == valid[0]:
self.assertEqual(site.feed.updated, False)
def test07_update(self):
"""
        Doesn't update or download headers because the last check was less
        than 1 minute ago.
"""
m = Manager()
m.update_sites(1)
sites = m.get_sites()
for site in sites:
for valid in valid_urls:
if site.feed.url == valid[0]:
self.assertEqual(site.feed.updated, False)
def test08_update(self):
"""
Checking arguments.
"""
m = Manager()
self.assertRaises(TypeError, m.update_sites, "string", None)
self.assertRaises(ValueError, m.update_sites, -1, None)
self.assertRaises(TypeError, m.update_sites, 1, "string")
self.assertRaises(ValueError, m.update_sites, 1, 0)
    def test09_mp3blogs(self):
"""
Getting links from mp3 blogs.
"""
m = Manager()
q = "enclosure_ends=mp3 or embed_ends=mp3 or link_ends=mp3"
for blog in mp3blogs:
m.del_site(blog[0])
name = m.add_site(blog[0])
if not name:
raise AssertionError("Couldn't add feed '%s'." % blog[0])
m.update_sites().filter(q)
sites = m.get_sites()
for site in sites:
if site.feed.url == blog[0]:
if len(blog) > 1:
self.assertEqual(site.feed.updated, True)
entries = site.feed.entries
urls = []
for entry in entries:
for l in entry.links:
urls.append(l)
for l in entry.enclosures:
urls.append(l)
urls = Set(urls)
for i in range(1, len(blog)):
if not blog[i] in urls:
raise AssertionError("1Couldn't find link '%s' in" \
" feed '%s'." % (blog[i], site.feed.url))
for url in urls:
print url
if not url in blog:
raise AssertionError("2Couldn't find link " \
"'%s' in feed '%s'." % (url, blog[0]))
m.del_site(blog[0])
def test11_del_sites(self):
m = Manager()
self.assertRaises(TypeError, m.del_site, [])
self.assertRaises(TypeError, m.del_site, ())
for valid in valid_urls:
m.del_site(valid[0])
def test12_logger_dir(self):
if not os.path.isdir(os.path.join(os.path.expanduser('~'), '.leolo')):
raise AssertionError("Logger dir can't be created! Check permissions.")
if __name__ == "__main__":
unittest.main()
| 46.927928
| 149
| 0.641774
|
9c44d66b2e738e52d6eb13c568ddd05a523d5a13
| 5,091
|
py
|
Python
|
poetry/console/commands/add.py
|
mgasner/poetry
|
44221689e05feb0cc93c231096334f8eefbf86fc
|
[
"MIT"
] | null | null | null |
poetry/console/commands/add.py
|
mgasner/poetry
|
44221689e05feb0cc93c231096334f8eefbf86fc
|
[
"MIT"
] | null | null | null |
poetry/console/commands/add.py
|
mgasner/poetry
|
44221689e05feb0cc93c231096334f8eefbf86fc
|
[
"MIT"
] | null | null | null |
from cleo import argument
from cleo import option
from .init import InitCommand
from .env_command import EnvCommand
class AddCommand(EnvCommand, InitCommand):
name = "add"
description = "Add a new dependency to <comment>pyproject.toml</>."
arguments = [argument("name", "Packages to add.", multiple=True)]
options = [
option("dev", "D", "Add package as development dependency."),
option(
"extras",
"E",
"Extras to activate for the dependency.",
flag=False,
multiple=True,
),
option("optional", None, "Add as an optional dependency."),
option(
"python",
None,
"Python version for which the dependencies must be installed.",
flag=False,
),
option(
"platform",
None,
"Platforms for which the dependencies must be installed.",
flag=False,
),
option("allow-prereleases", None, "Accept prereleases."),
option(
"dry-run",
None,
"Outputs the operations but will not execute anything (implicitly enables --verbose).",
),
]
help = """The add command adds required packages to your <comment>pyproject.toml</> and installs them.
If you do not specify a version constraint, poetry will choose a suitable one based on the available package versions.
"""
loggers = ["poetry.repositories.pypi_repository"]
def handle(self):
from poetry.installation import Installer
from poetry.semver import parse_constraint
from tomlkit import inline_table
packages = self.argument("name")
is_dev = self.option("dev")
if self.option("extras") and len(packages) > 1:
raise ValueError(
"You can only specify one package " "when using the --extras option"
)
section = "dependencies"
if is_dev:
section = "dev-dependencies"
original_content = self.poetry.file.read()
content = self.poetry.file.read()
poetry_content = content["tool"]["poetry"]
if section not in poetry_content:
poetry_content[section] = {}
for name in packages:
for key in poetry_content[section]:
if key.lower() == name.lower():
pair = self._parse_requirements([name])[0]
if "git" in pair or pair.get("version") == "latest":
continue
raise ValueError("Package {} is already present".format(name))
requirements = self._determine_requirements(
packages, allow_prereleases=self.option("allow-prereleases")
)
for _constraint in requirements:
if "version" in _constraint:
# Validate version constraint
parse_constraint(_constraint["version"])
constraint = inline_table()
for name, value in _constraint.items():
if name == "name":
continue
constraint[name] = value
if self.option("optional"):
constraint["optional"] = True
if self.option("allow-prereleases"):
constraint["allows-prereleases"] = True
if self.option("extras"):
extras = []
for extra in self.option("extras"):
if " " in extra:
extras += [e.strip() for e in extra.split(" ")]
else:
extras.append(extra)
constraint["extras"] = self.option("extras")
if self.option("python"):
constraint["python"] = self.option("python")
if self.option("platform"):
constraint["platform"] = self.option("platform")
if len(constraint) == 1 and "version" in constraint:
constraint = constraint["version"]
poetry_content[section][_constraint["name"]] = constraint
# Write new content
self.poetry.file.write(content)
# Cosmetic new line
self.line("")
# Update packages
self.reset_poetry()
installer = Installer(
self.io, self.env, self.poetry.package, self.poetry.locker, self.poetry.pool
)
installer.dry_run(self.option("dry-run"))
installer.update(True)
installer.whitelist([r["name"] for r in requirements])
try:
status = installer.run()
except Exception:
self.poetry.file.write(original_content)
raise
if status != 0 or self.option("dry-run"):
# Revert changes
if not self.option("dry-run"):
self.error(
"\n"
"Addition failed, reverting pyproject.toml "
"to its original content."
)
self.poetry.file.write(original_content)
return status
| 31.233129
| 118
| 0.546455
|
5820256119000fd1b3c5d13e0e98080b245402ac
| 2,493
|
py
|
Python
|
snipper/utils.py
|
mesuutt/snipper
|
53c36f6b250238a517afd005e57d6eb4c964374d
|
[
"MIT"
] | 28
|
2017-01-29T00:32:42.000Z
|
2021-10-03T07:49:21.000Z
|
snipper/utils.py
|
mesuutt/snipper
|
53c36f6b250238a517afd005e57d6eb4c964374d
|
[
"MIT"
] | 4
|
2017-03-24T22:01:50.000Z
|
2021-04-11T08:54:21.000Z
|
snipper/utils.py
|
mesuutt/snipper
|
53c36f6b250238a517afd005e57d6eb4c964374d
|
[
"MIT"
] | 2
|
2021-04-03T06:45:38.000Z
|
2022-01-04T17:28:36.000Z
|
import os
import re
import sys
import json
from subprocess import Popen, PIPE
import click
if sys.version_info >= (3, 3):
from subprocess import DEVNULL
else:
DEVNULL = open(os.devnull, 'w')
def open_files(filelist):
"""Open files for upload"""
files = []
for file_path in filelist:
if not os.path.exists(file_path):
continue
filename = os.path.basename(file_path)
files.append(('file', (filename, open(file_path, 'rb'))))
return files
def slugify(text):
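    # e.g. slugify("My Snippet #1") -> "my-snippet-1" (non-word runs become '-')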
return re.sub(r'\W+', '-', text).lower()
def run_command(cmd):
"""Run command on shell"""
    # The command runs in the background,
    # so this function can return before the process has completed.
process = Popen(cmd, shell=True, stderr=PIPE, stdout=DEVNULL, universal_newlines=True)
    # If you want to be sure the process has completed, use process.wait().
    # Reading its output (e.g. `process.stderr.read()`) also waits for it automatically.
return process
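# Usage sketch (the shell command is illustrative):
#   proc = run_command("echo hello")
#   proc.wait()  # block until the background command finishes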
def secho(colorize, text, **kwargs):
"""Print text colorized or not colorized"""
if not colorize:
kwargs.pop('fg', None)
kwargs.pop('bg', None)
click.secho(text, **kwargs)
def get_incremented_file_path(file_path):
"""
Convert filename to incremented if exist
For example:
Convert foo/snippet.txt to foo/snippet-1.txt if foo/snippet.txt already exist.
"""
if not os.path.exists(file_path):
return file_path
dir_path, basename = os.path.split(file_path)
filename, ext = os.path.splitext(basename)
    i = 1
    incremented_filename = "{}-{}{}".format(filename, i, ext)
    new_path = os.path.join(dir_path, incremented_filename)
    while os.path.exists(new_path):
        # Bump the suffix first, then rebuild the candidate path.
        i += 1
        incremented_filename = "{}-{}{}".format(filename, i, ext)
        new_path = os.path.join(dir_path, incremented_filename)
    return new_path
def read_metadata(config, owner=None):
"""Read meta file content"""
with open(get_owner_metadata_path(config, owner), 'r') as file:
return json.loads(file.read())
def update_metadata(config, data, owner=None):
"""Update local metadata file that keeps all snippet's data"""
with open(get_owner_metadata_path(config, owner), 'w') as file:
file.write(json.dumps(data))
def get_owner_metadata_path(config, owner=None):
if not owner:
owner = config.get('snipper', 'username')
return os.path.join(config.get('snipper', 'snippet_dir'), '{}.json'.format(owner))
| 26.521277
| 96
| 0.663859
|
b3964aa4aaafc428882b2697c245d428d2687298
| 6,541
|
py
|
Python
|
webcamviewer/src/PictureScreen.py
|
builder08/enigma2-plugins
|
45583c69b807ce26d756a51973c81d877afe3694
|
[
"OLDAP-2.3"
] | null | null | null |
webcamviewer/src/PictureScreen.py
|
builder08/enigma2-plugins
|
45583c69b807ce26d756a51973c81d877afe3694
|
[
"OLDAP-2.3"
] | null | null | null |
webcamviewer/src/PictureScreen.py
|
builder08/enigma2-plugins
|
45583c69b807ce26d756a51973c81d877afe3694
|
[
"OLDAP-2.3"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
import os

from enigma import ePicLoad, eTimer, getDesktop
from Screens.Screen import Screen
from Components.AVSwitch import AVSwitch
from Components.config import config
from Components.Pixmap import Pixmap
from Components.ActionMap import ActionMap
from .FTPDownloader import FTPDownloader
from twisted.web.client import HTTPDownloader
from twisted.internet import reactor
from urlparse import urlparse, urlunparse
def _parse(url, defaultPort = None):
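    # Split a URL into (scheme, host, port, path, username, password);
    # user:password credentials may be embedded before the host, and the
    # port defaults per scheme (443 for https, 21 for ftp, otherwise 80).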
url = url.strip()
parsed = urlparse(url)
scheme = parsed[0]
path = urlunparse(('', '')+parsed[2:])
if defaultPort is None:
if scheme == 'https':
defaultPort = 443
elif scheme == 'ftp':
defaultPort = 21
else:
defaultPort = 80
host, port = parsed[1], defaultPort
if '@' in host:
username, host = host.split('@')
if ':' in username:
username, password = username.split(':')
else:
password = ""
else:
username = ""
password = ""
if ':' in host:
host, port = host.split(':')
port = int(port)
if path == "":
path = "/"
return scheme, host, port, path, username, password
def download(url, file, contextFactory = None, *args, **kwargs):
"""Download a remote file from http(s) or ftp.
@param file: path to file on filesystem, or file-like object.
See HTTPDownloader to see what extra args can be passed if remote file
is accessible via http or https. Both Backends should offer supportPartial.
"""
scheme, host, port, path, username, password = _parse(url)
if scheme == 'ftp':
if not (username and password):
username = 'anonymous'
password = 'my@email.com'
client = FTPDownloader(
host,
port,
path,
file,
username,
password,
*args,
**kwargs
)
return client.deferred
# We force username and password here as we lack a satisfying input method
if username and password:
from base64 import encodestring
# twisted will crash if we don't rewrite this ;-)
url = scheme + '://' + host + ':' + str(port) + path
basicAuth = encodestring("%s:%s" % (username, password))
authHeader = "Basic " + basicAuth.strip()
AuthHeaders = {"Authorization": authHeader}
if "headers" in kwargs:
kwargs["headers"].update(AuthHeaders)
else:
kwargs["headers"] = AuthHeaders
factory = HTTPDownloader(url, file, *args, **kwargs)
if scheme == 'https':
from twisted.internet import ssl
if contextFactory is None:
contextFactory = ssl.ClientContextFactory()
reactor.connectSSL(host, port, factory, contextFactory)
else:
reactor.connectTCP(host, port, factory)
return factory.deferred
class PictureScreen(Screen):
skin = ""
    processing = False  # True while fetching or converting is active
autoreload = False
def __init__(self, session,title,filename, slideshowcallback = None,args=0):
self.slideshowcallback=slideshowcallback
self.screentitle = title
self.filename = filename
size_w = getDesktop(0).size().width()
size_h = getDesktop(0).size().height()
self.skin = """
<screen position="0,0" size="%i,%i" title="%s" flags=\"wfNoBorder\">
<widget name="pixmap" position="0,0" size="%i,%i" backgroundColor=\"black\"/>
</screen>""" % (size_w, size_h, filename, size_w, size_h)
Screen.__init__(self, session)
self.picload = ePicLoad()
self.picload.PictureData.get().append(self.setPictureCB)
sc = AVSwitch().getFramebufferScale()
self.picload.setPara((size_w, size_h, sc[0], sc[1], False, 1, '#ff000000'))
self["pixmap"] = Pixmap()
self.paused = False
self["actions"] = ActionMap(["WizardActions", "DirectionActions", "ChannelSelectBaseActions", "ShortcutActions"],
{
"ok": self.do,
"back": self.exit,
"green": self.AutoReloaderSwitch,
"yellow": self.pause,
"red": self.prev,
"blue": self.next,
}, -1)
self.onLayoutFinish.append(self.do)
def AutoReloaderSwitch(self):
if self.filename.startswith(("http://", "https://", "ftp://")):
if not self.autoreload:
self.autoreload = True
self.do()
else:
self.autoreload = False
if self.paused:
self.paused = False
self.slideshowcallback()
self.closetimer.start(int(config.plugins.pictureviewer.slideshowtime.value))
def do(self):
if self.processing:
pass
elif self.filename.startswith(("http://", "https://", "ftp://")):
self.fetchFile(self.filename)
else:
self.sourcefile = self.filename
self.setPicture(self.filename)
def exit(self):
self.cleanUP()
self.close()
def cleanUP(self):
try:
if os.path.exists("/tmp/loadedfile"):
os.remove("/tmp/loadedfile")
        except OSError:
            # The temp file may already be gone or not be removable; ignore.
            pass
def fetchFile(self, url):
self.processing = True
self.setTitle("loading File")
print("fetching URL", url)
self.sourcefile = "/tmp/loadedfile"
download(url, self.sourcefile).addCallback(self.fetchFinished).addErrback(self.fetchFailed)
def fetchFailed(self, string):
print("fetch failed", string)
self.setTitle("fetch failed: "+string)
def fetchFinished(self, string):
print("fetching finished")
self.setPicture(self.sourcefile)
def setPicture(self, string):
if not self.paused:
self.setTitle(self.screentitle)
else:
self.setTitle(_("pause") + ":" + self.screentitle)
self.picload.startDecode(string)
def setPictureCB(self, picInfo = None):
ptr = self.picload.getData()
if ptr is not None:
self["pixmap"].instance.setPixmap(ptr)
self.processing = False
if self.autoreload is True:
self.cleanUP()
self.do()
elif self.slideshowcallback is not None:
self.closetimer = eTimer()
self.closetimer.timeout.get().append(self.slideshowcallback)
print("waiting", config.plugins.pictureviewer.slideshowtime.value, "seconds for next picture")
if not self.paused:
self.closetimer.start(int(config.plugins.pictureviewer.slideshowtime.value))
def pause(self):
if not self.slideshowcallback:
return
if not self.paused:
self.closetimer.stop()
self.paused = True
self.setTitle(_("pause") + ":" + self.filename.split("/")[-1])
else:
self.paused = False
self.setTitle(self.filename.split("/")[-1])
self.slideshowcallback()
self.closetimer.start(int(config.plugins.pictureviewer.slideshowtime.value))
def prev(self):
if not self.slideshowcallback:
return
if not self.paused:
self.closetimer.stop()
self.paused = True
self.slideshowcallback(prev = True)
def next(self):
if not self.slideshowcallback:
return
if not self.paused:
self.closetimer.stop()
self.paused = True
self.slideshowcallback()
| 26.589431
| 115
| 0.695612
|
4fe144c0047fcbc1bf28cf1dd72dd3482f412387
| 10,076
|
py
|
Python
|
pettingzoo/mpe/_mpe_utils/simple_env.py
|
carlosluis/PettingZoo
|
aec87907777e0e62652d499b3ac96f680acf35ad
|
[
"Apache-2.0"
] | 846
|
2020-05-12T05:55:00.000Z
|
2021-10-08T19:38:40.000Z
|
pettingzoo/mpe/_mpe_utils/simple_env.py
|
carlosluis/PettingZoo
|
aec87907777e0e62652d499b3ac96f680acf35ad
|
[
"Apache-2.0"
] | 237
|
2020-04-27T06:01:39.000Z
|
2021-10-13T02:55:54.000Z
|
pettingzoo/mpe/_mpe_utils/simple_env.py
|
carlosluis/PettingZoo
|
aec87907777e0e62652d499b3ac96f680acf35ad
|
[
"Apache-2.0"
] | 126
|
2020-05-29T04:20:29.000Z
|
2021-10-13T05:31:12.000Z
|
import numpy as np
from gym import spaces
from gym.utils import seeding
from pettingzoo import AECEnv
from pettingzoo.utils import wrappers
from pettingzoo.utils.agent_selector import agent_selector
def make_env(raw_env):
def env(**kwargs):
env = raw_env(**kwargs)
if env.continuous_actions:
env = wrappers.ClipOutOfBoundsWrapper(env)
else:
env = wrappers.AssertOutOfBoundsWrapper(env)
env = wrappers.OrderEnforcingWrapper(env)
return env
return env
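# Typical use (the concrete scenario module name varies by PettingZoo release;
# this sketch assumes the MPE simple_spread environment):
#   from pettingzoo.mpe import simple_spread_v2
#   env = simple_spread_v2.env(max_cycles=25)
#   env.reset()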
class SimpleEnv(AECEnv):
def __init__(self, scenario, world, max_cycles, continuous_actions=False, local_ratio=None):
super().__init__()
self.seed()
self.metadata = {
'render.modes': ['human', 'rgb_array'],
'is_parallelizable': True
}
self.max_cycles = max_cycles
self.scenario = scenario
self.world = world
self.continuous_actions = continuous_actions
self.local_ratio = local_ratio
self.scenario.reset_world(self.world, self.np_random)
self.agents = [agent.name for agent in self.world.agents]
self.possible_agents = self.agents[:]
self._index_map = {agent.name: idx for idx, agent in enumerate(self.world.agents)}
self._agent_selector = agent_selector(self.agents)
# set spaces
self.action_spaces = dict()
self.observation_spaces = dict()
state_dim = 0
for agent in self.world.agents:
if agent.movable:
space_dim = self.world.dim_p * 2 + 1
elif self.continuous_actions:
space_dim = 0
else:
space_dim = 1
if not agent.silent:
if self.continuous_actions:
space_dim += self.world.dim_c
else:
space_dim *= self.world.dim_c
obs_dim = len(self.scenario.observation(agent, self.world))
state_dim += obs_dim
if self.continuous_actions:
self.action_spaces[agent.name] = spaces.Box(low=0, high=1, shape=(space_dim,))
else:
self.action_spaces[agent.name] = spaces.Discrete(space_dim)
self.observation_spaces[agent.name] = spaces.Box(low=-np.float32(np.inf), high=+np.float32(np.inf), shape=(obs_dim,), dtype=np.float32)
self.state_space = spaces.Box(low=-np.float32(np.inf), high=+np.float32(np.inf), shape=(state_dim,), dtype=np.float32)
self.steps = 0
self.current_actions = [None] * self.num_agents
self.viewer = None
def observation_space(self, agent):
return self.observation_spaces[agent]
def action_space(self, agent):
return self.action_spaces[agent]
def seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
def observe(self, agent):
return self.scenario.observation(self.world.agents[self._index_map[agent]], self.world).astype(np.float32)
def state(self):
states = tuple(self.scenario.observation(self.world.agents[self._index_map[agent]], self.world).astype(np.float32) for agent in self.possible_agents)
return np.concatenate(states, axis=None)
def reset(self):
self.scenario.reset_world(self.world, self.np_random)
self.agents = self.possible_agents[:]
self.rewards = {name: 0. for name in self.agents}
self._cumulative_rewards = {name: 0. for name in self.agents}
self.dones = {name: False for name in self.agents}
self.infos = {name: {} for name in self.agents}
self._reset_render()
self.agent_selection = self._agent_selector.reset()
self.steps = 0
self.current_actions = [None] * self.num_agents
def _execute_world_step(self):
# set action for each agent
for i, agent in enumerate(self.world.agents):
action = self.current_actions[i]
scenario_action = []
if agent.movable:
mdim = self.world.dim_p * 2 + 1
if self.continuous_actions:
scenario_action.append(action[0:mdim])
action = action[mdim:]
else:
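                    # Discrete case: movement and communication choices are packed
                    # into a single integer (mixed radix); modulo extracts the
                    # movement component and floor division shifts to the next one.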
scenario_action.append(action % mdim)
action //= mdim
if not agent.silent:
scenario_action.append(action)
self._set_action(scenario_action, agent, self.action_spaces[agent.name])
self.world.step()
global_reward = 0.
if self.local_ratio is not None:
global_reward = float(self.scenario.global_reward(self.world))
for agent in self.world.agents:
agent_reward = float(self.scenario.reward(agent, self.world))
if self.local_ratio is not None:
reward = global_reward * (1 - self.local_ratio) + agent_reward * self.local_ratio
else:
reward = agent_reward
self.rewards[agent.name] = reward
# set env action for a particular agent
def _set_action(self, action, agent, action_space, time=None):
agent.action.u = np.zeros(self.world.dim_p)
agent.action.c = np.zeros(self.world.dim_c)
if agent.movable:
# physical action
agent.action.u = np.zeros(self.world.dim_p)
if self.continuous_actions:
# Process continuous action as in OpenAI MPE
agent.action.u[0] += action[0][1] - action[0][2]
agent.action.u[1] += action[0][3] - action[0][4]
else:
# process discrete action
if action[0] == 1:
agent.action.u[0] = -1.0
if action[0] == 2:
agent.action.u[0] = +1.0
if action[0] == 3:
agent.action.u[1] = -1.0
if action[0] == 4:
agent.action.u[1] = +1.0
sensitivity = 5.0
if agent.accel is not None:
sensitivity = agent.accel
agent.action.u *= sensitivity
action = action[1:]
if not agent.silent:
# communication action
if self.continuous_actions:
agent.action.c = action[0]
else:
agent.action.c = np.zeros(self.world.dim_c)
agent.action.c[action[0]] = 1.0
action = action[1:]
# make sure we used all elements of action
assert len(action) == 0
def step(self, action):
if self.dones[self.agent_selection]:
return self._was_done_step(action)
cur_agent = self.agent_selection
current_idx = self._index_map[self.agent_selection]
next_idx = (current_idx + 1) % self.num_agents
self.agent_selection = self._agent_selector.next()
self.current_actions[current_idx] = action
if next_idx == 0:
self._execute_world_step()
self.steps += 1
if self.steps >= self.max_cycles:
for a in self.agents:
self.dones[a] = True
else:
self._clear_rewards()
self._cumulative_rewards[cur_agent] = 0
self._accumulate_rewards()
def render(self, mode='human'):
from . import rendering
if self.viewer is None:
self.viewer = rendering.Viewer(700, 700)
# create rendering geometry
if self.render_geoms is None:
# import rendering only if we need it (and don't import for headless machines)
# from gym.envs.classic_control import rendering
# from multiagent._mpe_utils import rendering
self.render_geoms = []
self.render_geoms_xform = []
for entity in self.world.entities:
geom = rendering.make_circle(entity.size)
xform = rendering.Transform()
if 'agent' in entity.name:
geom.set_color(*entity.color[:3], alpha=0.5)
else:
geom.set_color(*entity.color[:3])
geom.add_attr(xform)
self.render_geoms.append(geom)
self.render_geoms_xform.append(xform)
# add geoms to viewer
self.viewer.geoms = []
for geom in self.render_geoms:
self.viewer.add_geom(geom)
self.viewer.text_lines = []
idx = 0
for agent in self.world.agents:
if not agent.silent:
tline = rendering.TextLine(self.viewer.window, idx)
self.viewer.text_lines.append(tline)
idx += 1
alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
for idx, other in enumerate(self.world.agents):
if other.silent:
continue
if np.all(other.state.c == 0):
word = '_'
elif self.continuous_actions:
word = '[' + ",".join([f"{comm:.2f}" for comm in other.state.c]) + "]"
else:
word = alphabet[np.argmax(other.state.c)]
message = (other.name + ' sends ' + word + ' ')
self.viewer.text_lines[idx].set_text(message)
# update bounds to center around agent
all_poses = [entity.state.p_pos for entity in self.world.entities]
cam_range = np.max(np.abs(np.array(all_poses))) + 1
self.viewer.set_max_size(cam_range)
# update geometry positions
for e, entity in enumerate(self.world.entities):
self.render_geoms_xform[e].set_translation(*entity.state.p_pos)
# render to display or array
return self.viewer.render(return_rgb_array=mode == 'rgb_array')
# reset rendering assets
def _reset_render(self):
self.render_geoms = None
self.render_geoms_xform = None
def close(self):
if self.viewer is not None:
self.viewer.close()
self.viewer = None
self._reset_render()
| 36.507246
| 157
| 0.577809
|
7ed3542036d6783aed0ba3fd0b01c63c76818e7e
| 35
|
py
|
Python
|
malib/replay_buffers/rollout_buffer.py
|
wwxFromTju/malib
|
7cd2a4af55cf1f56da8854e26ea7a4f3782ceea2
|
[
"MIT"
] | 6
|
2021-05-19T10:25:36.000Z
|
2021-12-27T03:30:33.000Z
|
malib/replay_buffers/rollout_buffer.py
|
wwxFromTju/malib
|
7cd2a4af55cf1f56da8854e26ea7a4f3782ceea2
|
[
"MIT"
] | 1
|
2021-05-29T04:51:37.000Z
|
2021-05-30T06:18:10.000Z
|
malib/replay_buffers/rollout_buffer.py
|
ying-wen/malib_deprecated
|
875338b81c4d87064ad31201f461ef742db05f25
|
[
"MIT"
] | 1
|
2021-05-31T16:16:12.000Z
|
2021-05-31T16:16:12.000Z
|
# Created by yingwen at 2019-06-30
| 17.5
| 34
| 0.742857
|
1e376cb5a9e1ca6c219093b9fed68a29ec094467
| 4,160
|
py
|
Python
|
font_pl.py
|
szajakubiak/showAirQData
|
cf89f10e4295b145be30b6772afe4ce97c51fd43
|
[
"MIT"
] | null | null | null |
font_pl.py
|
szajakubiak/showAirQData
|
cf89f10e4295b145be30b6772afe4ce97c51fd43
|
[
"MIT"
] | null | null | null |
font_pl.py
|
szajakubiak/showAirQData
|
cf89f10e4295b145be30b6772afe4ce97c51fd43
|
[
"MIT"
] | null | null | null |
font = {
'A' : '001111110010010000010010000001111110',
'Ą' : '001111110010010000010010001001111110',
'a' : '000000100000101010000101010000011110',
'ą' : '000000100000101010000101010000011110000000001',
'B' : '011111110010010010010010010001101100',
'b' : '011111110000100010000100010000011100',
'C' : '001111100010000010010000010001000100',
'Ć' : '001111100010000010110000010001000100',
'c' : '000011100000100010000100010000100010',
'ć' : '000011100000100010010100010100100010',
'D' : '011111110010000010010000010001111100',
'd' : '000011100000100010000100010011111110',
'E' : '011111110010010010010010010010000010',
'Ę' : '011111110010010010010010011010000010',
'e' : '000011100000101010000101010000011000',
'ę' : '000011100000101010000101011000011000',
'F' : '011111110010010000010010000010000000',
'f' : '000010000001111110010010000',
'G' : '001111100010000010010010010001011110',
'g' : '000011001000100101000100101000111110',
'H' : '011111110000010000000010000011111110',
'h' : '011111110000100000000100000000011110',
'I' : '010000010011111110010000010',
'i' : '000100010010111110000000010',
'J' : '000000010000000001010000001011111110010000000',
'j' : '000000010000000001000100001010111110',
'K' : '011111110000010000000101000011000110',
'k' : '011111110000001000000010100000100010',
'L' : '011111110000000010000000010000000010',
'Ł' : '011111110000010010000100010000000010',
'l' : '010000010011111110000000010',
'ł' : '010001010011111110000100010',
'M' : '011111110001000000000100000001000000011111110',
'm' : '000111110000100000000011110000100000000011110',
'N' : '011111110001100000000010000000001100011111110',
'Ń' : '011111110001100000100010000000001100011111110',
'n' : '000111110000100000000100000000011110',
'ń' : '000111110000100000010100000100011110',
'O' : '001111100010000010010000010001111100',
'Ó' : '001111100010000010110000010001111100',
'o' : '000011100000100010000100010000011100',
'ó' : '000011100000100010010100010100011100',
'P' : '011111110010010000010010000001100000',
'p' : '000111111000100100000100100000011000',
'Q' : '001111100010000010010000010001111101',
'q' : '000011000000100100000100100000111111',
'R' : '011111110010010000010010000001101110',
'r' : '000111110000010000000100000000100000',
'S' : '001100010010010010010010010010001100',
'Ś' : '001100010010010010110010010010001100',
's' : '000010010000101010000101010000100100',
'ś' : '000010010000101010010101010100100100',
'T' : '010000000010000000011111110010000000010000000',
't' : '000010000011111100000010010',
'U' : '011111100000000010000000010011111100',
'u' : '000111100000000010000000010000111110',
'V' : '011110000000001100000000010000001100011110000',
'v' : '000111000000000100000000010000000100000111000',
'W' : '011111110000000100000001000000000100011111110',
'w' : '000111110000000100000001000000000100000111110',
'X' : '011000110000101000000010000000101000011000110',
'x' : '000100010000010100000001000000010100000100010',
'Y' : '011000000000100000000011110000100000011000000',
'y' : '000111001000000101000000101000111110',
'Z' : '010000110010001010010010010011100010',
'Ż' : '010000110010001010110010010010100010011000010',
'Ź' : '010000110010001010010010010110100010011000010',
'z' : '000100110000101010000110010',
'ż' : '000100110010101010000110010',
'ź' : '000100110010101010100110010',
'1' : '001000010011111110000000010',
'2' : '001000110010001010010010010001100010',
'3' : '001000100010000010010010010001101100',
'4' : '000110000001010000010010000011111110',
'5' : '011100100010100010010100010010011100',
'6' : '001111100010100010010100010000011100',
'7' : '010000110010001000010010000011100000',
'8' : '001101100010010010010010010001101100',
'9' : '001100000010010010010010010001111100',
'0' : '001111100010000010010000010001111100',
' ' : '000000000000000000',
'.' : '000000110000000110',
',' : '000000001000000110',
'-' : '000010000000010000000010000',
'/' : '000000110000001000000010000000100000011000000',
'\\' : '011000000000100000000010000000001000000000110',
'µ' : '000000001000000010000111100000000010000000010000111110',
'²' : '010110000101010000010010000',
'³' : '100010000101010000010100000',
'°' : '001100000010010000010010000001100000'
}
| 44.731183
| 63
| 0.801923
|
88795668e56792e848879df220b3a587fe7a2837
| 761
|
py
|
Python
|
pythonHTTP/cgi-bin/find13f.py
|
terasakisatoshi/myHTML
|
1622ba7a57b7ddfba3bbbcfef1f17dcb80ddb2b8
|
[
"MIT"
] | null | null | null |
pythonHTTP/cgi-bin/find13f.py
|
terasakisatoshi/myHTML
|
1622ba7a57b7ddfba3bbbcfef1f17dcb80ddb2b8
|
[
"MIT"
] | null | null | null |
pythonHTTP/cgi-bin/find13f.py
|
terasakisatoshi/myHTML
|
1622ba7a57b7ddfba3bbbcfef1f17dcb80ddb2b8
|
[
"MIT"
] | null | null | null |
import cgi
from datetime import datetime
html_body=u"""
<html><head>
<meta http-equiv="content-type"
content="text/html;charset=utf-8">
</head>
<body>
%s
</body>
</html>"""
content = ''
form = cgi.FieldStorage()
year_str = form.getvalue('year', '')
if not year_str.isdigit():
    content = u"Please enter a year in the Gregorian calendar."
else:
    year = int(year_str)
    friday13 = 0
    for month in range(1, 13):
        date = datetime(year, month, 13)  # the 13th of every month
        if date.weekday() == 4:  # weekday() == 4 means Friday
            friday13 += 1
            content += u"%d-%02d-13 falls on a Friday." % (year, date.month)
            content += u"<br />"
    if friday13:
        content += u"In %d there are %d Friday the 13ths in total." % (year, friday13)
    else:
        content += u"There are none."
print("Content-type: text/html;charset=utf-8\n")
print((html_body % content).encode('utf-8'))
| 21.742857
| 61
| 0.595269
|
cbd0da8d3905ff3565853a596093bd197e9d6c33
| 479
|
py
|
Python
|
cfgov/data_research/tests/test_blocks.py
|
atuggle/cfgov-refresh
|
5a9cfd92b460b9be7befb39f5845abf56857aeac
|
[
"CC0-1.0"
] | null | null | null |
cfgov/data_research/tests/test_blocks.py
|
atuggle/cfgov-refresh
|
5a9cfd92b460b9be7befb39f5845abf56857aeac
|
[
"CC0-1.0"
] | null | null | null |
cfgov/data_research/tests/test_blocks.py
|
atuggle/cfgov-refresh
|
5a9cfd92b460b9be7befb39f5845abf56857aeac
|
[
"CC0-1.0"
] | null | null | null |
from __future__ import unicode_literals
from django.test import TestCase
from v1.models import BrowsePage
class ConferenceRegistrationFormTests(TestCase):
fixtures = ['conference_registration_page.json']
def test_page_renders_using_template(self):
page = BrowsePage.objects.get(pk=99999)
request = self.client.get('/').wsgi_request
response = page.serve(request)
self.assertContains(response, 'Which sessions will you be attending?')
| 29.9375
| 78
| 0.749478
|
2ebd6393604527997f0bacb386d8b7735d096a6e
| 22,786
|
py
|
Python
|
plugins/modules/oci_waas_custom_protection_rule.py
|
LaudateCorpus1/oci-ansible-collection
|
2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_waas_custom_protection_rule.py
|
LaudateCorpus1/oci-ansible-collection
|
2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_waas_custom_protection_rule.py
|
LaudateCorpus1/oci-ansible-collection
|
2b1cd87b4d652a97c1ca752cfc4fdc4bdb37a7e7
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright (c) 2020, 2022 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_waas_custom_protection_rule
short_description: Manage a CustomProtectionRule resource in Oracle Cloud Infrastructure
description:
- This module allows the user to create, update and delete a CustomProtectionRule resource in Oracle Cloud Infrastructure
- For I(state=present), creates a new custom protection rule in the specified compartment.
- Custom protection rules allow you to create rules in addition to the rulesets provided by the Web Application Firewall service, including rules from
L(ModSecurity,https://modsecurity.org/). The syntax for custom rules is based on the ModSecurity syntax. For more information about custom protection
rules, see L(Custom Protection Rules,https://docs.cloud.oracle.com/iaas/Content/WAF/Tasks/customprotectionrules.htm).
- "This resource has the following action operations in the M(oracle.oci.oci_waas_custom_protection_rule_actions) module: change_compartment."
version_added: "2.9.0"
author: Oracle (@oracle)
options:
compartment_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the compartment in which to create the custom protection
rule.
- Required for create using I(state=present).
- Required for update when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
- Required for delete when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
type: str
display_name:
description:
- A user-friendly name for the custom protection rule.
- Required for create using I(state=present).
- Required for update, delete when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is set.
- This parameter is updatable when C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["name"]
description:
description:
- A description for the Custom Protection rule.
- This parameter is updatable.
type: str
template:
description:
- The template text of the custom protection rule. All custom protection rules are expressed in ModSecurity Rule Language.
- Additionally, each rule must include two placeholder variables that are updated by the WAF service upon publication of the rule.
- "`id: {{id_1}}` - This field is populated with a unique rule ID generated by the WAF service which identifies a `SecRule`. More than one `SecRule`
can be defined in the `template` field of a CreateCustomSecurityRule call. The value of the first `SecRule` must be `id: {{id_1}}` and the `id`
field of each subsequent `SecRule` should increase by one, as shown in the example."
- "`ctl:ruleEngine={{mode}}` - The action to be taken when the criteria of the `SecRule` are met, either `OFF`, `DETECT` or `BLOCK`. This field is
automatically populated with the corresponding value of the `action` field of the `CustomProtectionRuleSetting` schema when the `WafConfig` is
updated."
- "*Example:*
```
SecRule REQUEST_COOKIES \\"regex matching SQL injection - part 1/2\\" \\\\
\\"phase:2, \\\\
msg:'Detects chained SQL injection attempts 1/2.', \\\\
id: {{id_1}}, \\\\
ctl:ruleEngine={{mode}}, \\\\
deny\\"
SecRule REQUEST_COOKIES \\"regex matching SQL injection - part 2/2\\" \\\\
\\"phase:2, \\\\
msg:'Detects chained SQL injection attempts 2/2.', \\\\
id: {{id_2}}, \\\\
ctl:ruleEngine={{mode}}, \\\\
deny\\"
```"
- The example contains two `SecRules` each having distinct regex expression to match the `Cookie` header value during the second input analysis
phase.
- For more information about custom protection rules, see L(Custom Protection
Rules,https://docs.cloud.oracle.com/Content/WAF/tasks/customprotectionrules.htm).
- "For more information about ModSecurity syntax, see L(Making Rules: The Basic Syntax,https://www.modsecurity.org/CRS/Documentation/making.html)."
- For more information about ModSecurity's open source WAF rules, see L(Mod Security's OWASP Core Rule Set
documentation,https://www.modsecurity.org/CRS/Documentation/index.html).
- Required for create using I(state=present).
- This parameter is updatable.
type: str
freeform_tags:
description:
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Department\\": \\"Finance\\"}`"
- This parameter is updatable.
type: dict
defined_tags:
description:
- Defined tags for this resource. Each key is predefined and scoped to a namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
- This parameter is updatable.
type: dict
custom_protection_rule_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the custom protection rule. This number is generated when
the custom protection rule is added to the compartment.
- Required for update using I(state=present) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
- Required for delete using I(state=absent) when environment variable C(OCI_USE_NAME_AS_IDENTIFIER) is not set.
type: str
aliases: ["id"]
state:
description:
- The state of the CustomProtectionRule.
- Use I(state=present) to create or update a CustomProtectionRule.
- Use I(state=absent) to delete a CustomProtectionRule.
type: str
required: false
default: 'present'
choices: ["present", "absent"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_creatable_resource, oracle.oci.oracle_wait_options ]
"""
EXAMPLES = """
- name: Create custom_protection_rule
oci_waas_custom_protection_rule:
# required
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
display_name: display_name_example
template: template_example
# optional
description: description_example
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
- name: Update custom_protection_rule
oci_waas_custom_protection_rule:
# required
custom_protection_rule_id: "ocid1.customprotectionrule.oc1..xxxxxxEXAMPLExxxxxx"
# optional
display_name: display_name_example
description: description_example
template: template_example
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
- name: Update custom_protection_rule using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
oci_waas_custom_protection_rule:
# required
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
display_name: display_name_example
# optional
description: description_example
template: template_example
freeform_tags: {'Department': 'Finance'}
defined_tags: {'Operations': {'CostCenter': 'US'}}
- name: Delete custom_protection_rule
oci_waas_custom_protection_rule:
# required
custom_protection_rule_id: "ocid1.customprotectionrule.oc1..xxxxxxEXAMPLExxxxxx"
state: absent
- name: Delete custom_protection_rule using name (when environment variable OCI_USE_NAME_AS_IDENTIFIER is set)
oci_waas_custom_protection_rule:
# required
compartment_id: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
display_name: display_name_example
state: absent
"""
RETURN = """
custom_protection_rule:
description:
- Details of the CustomProtectionRule resource acted upon by the current operation
returned: on success
type: complex
contains:
id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the custom protection rule.
returned: on success
type: str
sample: "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx"
compartment_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the custom protection rule's compartment.
returned: on success
type: str
sample: "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx"
display_name:
description:
- The user-friendly name of the custom protection rule.
returned: on success
type: str
sample: display_name_example
description:
description:
- The description of the custom protection rule.
returned: on success
type: str
sample: description_example
mod_security_rule_ids:
description:
- The auto-generated ID for the custom protection rule. These IDs are referenced in logs.
returned: on success
type: list
sample: []
template:
description:
- The template text of the custom protection rule. All custom protection rules are expressed in ModSecurity Rule Language.
- Additionally, each rule must include two placeholder variables that are updated by the WAF service upon publication of the rule.
- "`id: {{id_1}}` - This field is populated with a unique rule ID generated by the WAF service which identifies a `SecRule`. More than one
`SecRule` can be defined in the `template` field of a CreateCustomSecurityRule call. The value of the first `SecRule` must be `id: {{id_1}}`
and the `id` field of each subsequent `SecRule` should increase by one, as shown in the example."
- "`ctl:ruleEngine={{mode}}` - The action to be taken when the criteria of the `SecRule` are met, either `OFF`, `DETECT` or `BLOCK`. This field
is automatically populated with the corresponding value of the `action` field of the `CustomProtectionRuleSetting` schema when the `WafConfig`
is updated."
- "*Example:*
```
SecRule REQUEST_COOKIES \\"regex matching SQL injection - part 1/2\\" \\\\
\\"phase:2, \\\\
msg:'Detects chained SQL injection attempts 1/2.', \\\\
id: {{id_1}}, \\\\
ctl:ruleEngine={{mode}}, \\\\
deny\\"
SecRule REQUEST_COOKIES \\"regex matching SQL injection - part 2/2\\" \\\\
\\"phase:2, \\\\
msg:'Detects chained SQL injection attempts 2/2.', \\\\
id: {{id_2}}, \\\\
ctl:ruleEngine={{mode}}, \\\\
deny\\"
```"
- The example contains two `SecRules` each having distinct regex expression to match the `Cookie` header value during the second input analysis
phase.
- For more information about custom protection rules, see L(Custom Protection
Rules,https://docs.cloud.oracle.com/Content/WAF/tasks/customprotectionrules.htm).
- "For more information about ModSecurity syntax, see L(Making Rules: The Basic
Syntax,https://www.modsecurity.org/CRS/Documentation/making.html)."
- For more information about ModSecurity's open source WAF rules, see L(Mod Security's OWASP Core Rule Set
documentation,https://www.modsecurity.org/CRS/Documentation/index.html).
returned: on success
type: str
sample: template_example
lifecycle_state:
description:
- The current lifecycle state of the custom protection rule.
returned: on success
type: str
sample: CREATING
time_created:
description:
- The date and time the protection rule was created, expressed in RFC 3339 timestamp format.
returned: on success
type: str
sample: "2013-10-20T19:20:30+01:00"
freeform_tags:
description:
- Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
defined_tags:
description:
- Defined tags for this resource. Each key is predefined and scoped to a namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
- "Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
sample: {
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"display_name": "display_name_example",
"description": "description_example",
"mod_security_rule_ids": [],
"template": "template_example",
"lifecycle_state": "CREATING",
"time_created": "2013-10-20T19:20:30+01:00",
"freeform_tags": {'Department': 'Finance'},
"defined_tags": {'Operations': {'CostCenter': 'US'}}
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceHelperBase,
get_custom_class,
)
try:
from oci.waas import WaasClient
from oci.waas.models import CreateCustomProtectionRuleDetails
from oci.waas.models import UpdateCustomProtectionRuleDetails
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class CustomProtectionRuleHelperGen(OCIResourceHelperBase):
"""Supported operations: create, update, get, list and delete"""
def get_possible_entity_types(self):
return super(
CustomProtectionRuleHelperGen, self
).get_possible_entity_types() + [
"customprotectionrule",
"customprotectionrules",
"waascustomprotectionrule",
"waascustomprotectionrules",
"customprotectionruleresource",
"customprotectionrulesresource",
"waas",
]
def get_module_resource_id_param(self):
return "custom_protection_rule_id"
def get_module_resource_id(self):
return self.module.params.get("custom_protection_rule_id")
def get_get_fn(self):
return self.client.get_custom_protection_rule
def get_get_model_from_summary_model(self, summary_model):
return oci_common_utils.call_with_backoff(
self.client.get_custom_protection_rule,
custom_protection_rule_id=summary_model.id,
).data
def get_resource(self):
return oci_common_utils.call_with_backoff(
self.client.get_custom_protection_rule,
custom_protection_rule_id=self.module.params.get(
"custom_protection_rule_id"
),
)
def get_required_kwargs_for_list(self):
required_list_method_params = [
"compartment_id",
]
return dict(
(param, self.module.params[param]) for param in required_list_method_params
)
def get_optional_kwargs_for_list(self):
optional_list_method_params = ["display_name"]
return dict(
(param, self.module.params[param])
for param in optional_list_method_params
if self.module.params.get(param) is not None
and (
self._use_name_as_identifier()
or (
not self.module.params.get("key_by")
or param in self.module.params.get("key_by")
)
)
)
def list_resources(self):
required_kwargs = self.get_required_kwargs_for_list()
optional_kwargs = self.get_optional_kwargs_for_list()
kwargs = oci_common_utils.merge_dicts(required_kwargs, optional_kwargs)
return oci_common_utils.list_all_resources(
self.client.list_custom_protection_rules, **kwargs
)
def get_create_model_class(self):
return CreateCustomProtectionRuleDetails
def create_resource(self):
create_details = self.get_create_model()
return oci_wait_utils.call_and_wait(
call_fn=self.client.create_custom_protection_rule,
call_fn_args=(),
call_fn_kwargs=dict(create_custom_protection_rule_details=create_details,),
waiter_type=oci_wait_utils.LIFECYCLE_STATE_WAITER_KEY,
operation=oci_common_utils.CREATE_OPERATION_KEY,
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=self.get_wait_for_states_for_operation(
oci_common_utils.CREATE_OPERATION_KEY,
),
)
def get_update_model_class(self):
return UpdateCustomProtectionRuleDetails
def update_resource(self):
update_details = self.get_update_model()
return oci_wait_utils.call_and_wait(
call_fn=self.client.update_custom_protection_rule,
call_fn_args=(),
call_fn_kwargs=dict(
custom_protection_rule_id=self.module.params.get(
"custom_protection_rule_id"
),
update_custom_protection_rule_details=update_details,
),
waiter_type=oci_wait_utils.LIFECYCLE_STATE_WAITER_KEY,
operation=oci_common_utils.UPDATE_OPERATION_KEY,
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=self.get_wait_for_states_for_operation(
oci_common_utils.UPDATE_OPERATION_KEY,
),
)
def delete_resource(self):
return oci_wait_utils.call_and_wait(
call_fn=self.client.delete_custom_protection_rule,
call_fn_args=(),
call_fn_kwargs=dict(
custom_protection_rule_id=self.module.params.get(
"custom_protection_rule_id"
),
),
waiter_type=oci_wait_utils.LIFECYCLE_STATE_WAITER_KEY,
operation=oci_common_utils.DELETE_OPERATION_KEY,
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=self.get_wait_for_states_for_operation(
oci_common_utils.DELETE_OPERATION_KEY,
),
)
CustomProtectionRuleHelperCustom = get_custom_class("CustomProtectionRuleHelperCustom")
class ResourceHelper(CustomProtectionRuleHelperCustom, CustomProtectionRuleHelperGen):
pass
def main():
module_args = oci_common_utils.get_common_arg_spec(
supports_create=True, supports_wait=True
)
module_args.update(
dict(
compartment_id=dict(type="str"),
display_name=dict(aliases=["name"], type="str"),
description=dict(type="str"),
template=dict(type="str"),
freeform_tags=dict(type="dict"),
defined_tags=dict(type="dict"),
custom_protection_rule_id=dict(aliases=["id"], type="str"),
state=dict(type="str", default="present", choices=["present", "absent"]),
)
)
module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
resource_helper = ResourceHelper(
module=module,
resource_type="custom_protection_rule",
service_client_class=WaasClient,
namespace="waas",
)
result = dict(changed=False)
if resource_helper.is_delete_using_name():
result = resource_helper.delete_using_name()
elif resource_helper.is_delete():
result = resource_helper.delete()
elif resource_helper.is_update_using_name():
result = resource_helper.update_using_name()
elif resource_helper.is_update():
result = resource_helper.update()
elif resource_helper.is_create():
result = resource_helper.create()
module.exit_json(**result)
if __name__ == "__main__":
main()
| 45.75502
| 160
| 0.628281
|
5ca85babe82895467f1becc90e836dac75875b97
| 1,442
|
py
|
Python
|
preprocessing.py
|
kuparez/headline_generator
|
2a44af30beaa0da21952bce7318e2947883873c4
|
[
"MIT"
] | null | null | null |
preprocessing.py
|
kuparez/headline_generator
|
2a44af30beaa0da21952bce7318e2947883873c4
|
[
"MIT"
] | null | null | null |
preprocessing.py
|
kuparez/headline_generator
|
2a44af30beaa0da21952bce7318e2947883873c4
|
[
"MIT"
] | null | null | null |
import re
from typing import Union, List
from bpemb import BPEmb
class BasicHtmlPreprocessor:
"""replace all html tags end entities with space"""
def __init__(self):
self.regexp = re.compile(r'&[\w\d]+;|<([^>]+)>')
def transform(self, text: Union[str, List[str]]):
if type(text) == str:
new_text = text
for r in self.regexp.finditer(text):
new_text = new_text.replace(r[0], ' ')
new_text = ' '.join([' '.join([t for t in sent.split(' ') if t])
for sent in new_text.lower().strip().split('\n')])
return new_text
elif type(text) == list:
return [self.transform(t) for t in text]
else:
raise TypeError(f'Type {type(text)} is not supported. `text` should be `list` or `str`')
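# Usage sketch (the sample markup is illustrative):
#   pre = BasicHtmlPreprocessor()
#   pre.transform('<b>Hello</b>&nbsp;World')  # -> 'hello world'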
class BPETokenizer:
"""Use byte pair encoding to transform text"""
def __init__(self, lang='ru', pretrained=True, vocab_size=100000, dim=300):
self.lang = lang
self.pretrained = pretrained
self.bpe = BPEmb(lang=self.lang, vs=vocab_size, dim=dim, vs_fallback=True)
def fit(self, text):
raise NotImplementedError('fit is not supported')
def transform(self, text: Union[str, List[str]], get_ids=True):
if get_ids:
return self.bpe.encode_ids_with_bos_eos(text)
else:
return self.bpe.encode_with_bos_eos(text)
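# Usage sketch (the pretrained BPEmb model is downloaded on first use;
# the sample string is illustrative):
#   tok = BPETokenizer(lang='ru')
#   ids = tok.transform('primer teksta')             # subword ids with BOS/EOS
#   pieces = tok.transform('primer teksta', get_ids=False)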
| 32.044444
| 100
| 0.590846
|
aa18a169a10977db1191b960ccfda9231dfb6bd4
| 183
|
py
|
Python
|
everest/tests/__init__.py
|
helixyte/everest
|
70c9b93c3061db5cb62428349d18b8fb8566411b
|
[
"MIT"
] | 3
|
2015-03-10T17:38:25.000Z
|
2017-04-29T03:47:06.000Z
|
everest/tests/__init__.py
|
helixyte/everest
|
70c9b93c3061db5cb62428349d18b8fb8566411b
|
[
"MIT"
] | 1
|
2015-03-02T16:02:41.000Z
|
2015-03-02T16:02:41.000Z
|
everest/tests/__init__.py
|
cenix/everest
|
70c9b93c3061db5cb62428349d18b8fb8566411b
|
[
"MIT"
] | 1
|
2020-07-12T22:46:59.000Z
|
2020-07-12T22:46:59.000Z
|
"""
This file is part of the everest project.
See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information.
Package initialization file.
Created on Nov 3, 2011.
"""
| 20.333333
| 76
| 0.765027
|
48d07ec368edb033b9c940b9fe0344bf3974e18e
| 453
|
py
|
Python
|
documents/forms.py
|
salazarpardo/redinnovacion
|
3f7c13af0af1887112a0492aea7782871fba0129
|
[
"CC-BY-3.0"
] | null | null | null |
documents/forms.py
|
salazarpardo/redinnovacion
|
3f7c13af0af1887112a0492aea7782871fba0129
|
[
"CC-BY-3.0"
] | null | null | null |
documents/forms.py
|
salazarpardo/redinnovacion
|
3f7c13af0af1887112a0492aea7782871fba0129
|
[
"CC-BY-3.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# models
from documents.models import Photo
from documents.models import File
# forms
from base.forms import BaseModelForm
class PhotoForm(BaseModelForm):
class Meta:
model = Photo
fields = ('photo',)
class FileForm(BaseModelForm):
class Meta:
model = File
fields = ('archive',)
class AttachmentForm(BaseModelForm):
class Meta:
model = File
fields = '__all__'
| 16.777778
| 36
| 0.637969
|
c94d1d288eda6386bb54f3806a53fd7c34972ab5
| 35,782
|
py
|
Python
|
tests/services/test_WalletAPIService.py
|
gcchainlabs/gcchain-core
|
88434ccbd6c47e45e701549358de4ff7198c8178
|
[
"MIT"
] | 1
|
2020-11-04T08:32:43.000Z
|
2020-11-04T08:32:43.000Z
|
tests/services/test_WalletAPIService.py
|
gcchainlabs/gcchain-core
|
88434ccbd6c47e45e701549358de4ff7198c8178
|
[
"MIT"
] | null | null | null |
tests/services/test_WalletAPIService.py
|
gcchainlabs/gcchain-core
|
88434ccbd6c47e45e701549358de4ff7198c8178
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# Distributed under the MIT software license, see the accompanying
# file LICENSE or http://www.opensource.org/licenses/mit-license.php.
from unittest import TestCase
from mock import Mock, patch
from pygclib.pygclib import bin2hstr, hstr2bin
from gc.core.misc import logger
from gc.core.AddressState import AddressState
from gc.daemon.walletd import WalletD
from gc.generated import gcwallet_pb2, gc_pb2
from gc.services.WalletAPIService import WalletAPIService
from tests.misc.helper import get_alice_xmss, get_bob_xmss, set_gc_dir, replacement_getTime
logger.initialize_default()
@patch('gc.core.misc.ntp.getTime', new=replacement_getTime)
class TestWalletAPI(TestCase):
def __init__(self, *args, **kwargs):
self.passphrase = '你好'
self.qaddress = "Q010400ff39df1ba4d1d5b8753e6d04c51c34b95b01fc3650c10ca7b296a18bdc105412c59d0b3b"
self.hex_seed = "0104008441d43524996f76236141d16b7b324323abf796e77ad" \
"7c874622a82f5744bb803f9b404d25733d0db82be7ac6f3c4cf"
self.mnemonic = "absorb drank lute brick cure evil inept group grey " \
"breed hood reefy eager depict weed image law legacy " \
"jockey calm lover freeze fact lively wide dread spiral " \
"jaguar span rinse salty pulsar violet fare"
super(TestWalletAPI, self).__init__(*args, **kwargs)
def test_addNewAddress(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
resp = service.AddNewAddress(gcwallet_pb2.AddNewAddressReq(), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(resp.address[0], 'Q')
def test_addNewAddressWithSlaves(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
walletd._public_stub.PushTransaction = Mock(
return_value=gc_pb2.PushTransactionResp(error_code=gc_pb2.PushTransactionResp.SUBMITTED))
service = WalletAPIService(walletd)
resp = service.AddNewAddressWithSlaves(gcwallet_pb2.AddNewAddressWithSlavesReq(), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(resp.address[0], 'Q')
def test_addAddressFromSeed(self):
with set_gc_dir("wallet_ver1"):
qaddress = "Q010400ff39df1ba4d1d5b8753e6d04c51c34b95b01fc3650c10ca7b296a18bdc105412c59d0b3b"
hex_seed = "0104008441d43524996f76236141d16b7b324323abf796e77ad7c874622a82f5744bb803f9b404d25733d0db82be7ac6f3c4cf"
walletd = WalletD()
service = WalletAPIService(walletd)
resp = service.AddAddressFromSeed(gcwallet_pb2.AddAddressFromSeedReq(seed=hex_seed), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(resp.address, qaddress)
def test_addAddressFromSeed2(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
resp = service.AddAddressFromSeed(gcwallet_pb2.AddAddressFromSeedReq(seed=self.mnemonic), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(resp.address, self.qaddress)
def test_listAddresses(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
resp = service.AddNewAddress(gcwallet_pb2.AddNewAddressReq(), context=None)
address = resp.address
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(resp.addresses[0], address)
def test_removeAddress(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
resp = service.AddNewAddress(gcwallet_pb2.AddNewAddressReq(), context=None)
address = resp.address
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(len(resp.addresses), 1)
resp = service.RemoveAddress(gcwallet_pb2.RemoveAddressReq(address=address), context=None)
self.assertEqual(resp.code, 0)
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(len(resp.addresses), 0)
def test_isValidAddress(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
qaddress = "Q010400ff39df1ba4d1d5b8753e6d04c51c34b95b01fc3650c10ca7b296a18bdc105412c59d0b3b"
resp = service.IsValidAddress(gcwallet_pb2.ValidAddressReq(address=qaddress), context=None)
self.assertEqual(resp.valid, "True")
qaddress = "Q010400ff39df1ba4d1d5b8753e6d04c51c34b95b01fc3650c10ca7b296a18bdc105412c59d0b00"
resp = service.IsValidAddress(gcwallet_pb2.ValidAddressReq(address=qaddress), context=None)
self.assertEqual(resp.valid, "False")
def test_getRecoverySeeds(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
service.AddAddressFromSeed(gcwallet_pb2.AddAddressFromSeedReq(seed=self.mnemonic), context=None)
resp = service.GetRecoverySeeds(gcwallet_pb2.GetRecoverySeedsReq(address=self.qaddress), context=None)
self.assertEqual(resp.hexseed, self.hex_seed)
self.assertEqual(resp.mnemonic, self.mnemonic)
def test_getWalletInfo(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
resp = service.GetWalletInfo(gcwallet_pb2.GetWalletInfoReq(), context=None)
self.assertEqual(resp.version, 1)
self.assertEqual(resp.address_count, 0)
self.assertFalse(resp.is_encrypted)
def test_relayTransferTxn(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
resp = service.AddNewAddress(gcwallet_pb2.AddNewAddressReq(), context=None)
qaddress = resp.address
addr_state = AddressState.get_default(walletd.qaddress_to_address(qaddress))
walletd._public_stub.GetAddressState = Mock(
return_value=gc_pb2.GetAddressStateResp(state=addr_state.pbdata))
walletd._public_stub.IsSlave = Mock(
return_value=gc_pb2.IsSlaveResp(result=True))
walletd._public_stub.GetOTS = Mock(
return_value=gc_pb2.GetOTSResp(next_unused_ots_index=0,
unused_ots_index_found=True))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
qaddresses_to = [alice_xmss.qaddress, bob_xmss.qaddress]
amounts = [1000000000, 1000000000]
walletd._public_stub.PushTransaction = Mock(
return_value=gc_pb2.PushTransactionResp(error_code=gc_pb2.PushTransactionResp.SUBMITTED))
resp = service.RelayTransferTxn(gcwallet_pb2.RelayTransferTxnReq(addresses_to=qaddresses_to,
amounts=amounts,
fee=100000000,
master_address=None,
signer_address=qaddress,
ots_index=0), context=None)
self.assertEqual(resp.code, 0)
self.assertIsNotNone(resp.tx)
def test_relayTransferTxnBySlave(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
walletd._public_stub.PushTransaction = Mock(
return_value=gc_pb2.PushTransactionResp(error_code=gc_pb2.PushTransactionResp.SUBMITTED))
service = WalletAPIService(walletd)
resp = service.AddNewAddressWithSlaves(gcwallet_pb2.AddNewAddressWithSlavesReq(), context=None)
qaddress = resp.address
addr_state = AddressState.get_default(walletd.qaddress_to_address(qaddress))
slaves = walletd.get_slave_list(qaddress)
addr_state.add_slave_pks_access_type(bytes(hstr2bin(slaves[0][0].pk)), 0)
walletd._public_stub.GetAddressState = Mock(
return_value=gc_pb2.GetAddressStateResp(state=addr_state.pbdata))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
qaddresses_to = [alice_xmss.qaddress, bob_xmss.qaddress]
amounts = [1000000000, 1000000000]
resp = service.RelayTransferTxnBySlave(
gcwallet_pb2.RelayTransferTxnBySlaveReq(addresses_to=qaddresses_to,
amounts=amounts,
fee=100000000,
master_address=qaddress), context=None)
self.assertEqual(resp.code, 0)
self.assertIsNotNone(resp.tx)
def test_relayMessageTxn(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
walletd._public_stub.PushTransaction = Mock(
return_value=gc_pb2.PushTransactionResp(error_code=gc_pb2.PushTransactionResp.SUBMITTED))
service = WalletAPIService(walletd)
resp = service.AddNewAddress(gcwallet_pb2.AddNewAddressReq(), context=None)
qaddress = resp.address
addr_state = AddressState.get_default(walletd.qaddress_to_address(qaddress))
walletd._public_stub.GetAddressState = Mock(
return_value=gc_pb2.GetAddressStateResp(state=addr_state.pbdata))
walletd._public_stub.GetOTS = Mock(
return_value=gc_pb2.GetOTSResp(next_unused_ots_index=0,
unused_ots_index_found=True))
resp = service.RelayMessageTxn(gcwallet_pb2.RelayMessageTxnReq(message=b'Hello gc!',
fee=100000000,
master_address=None,
signer_address=qaddress,
ots_index=0), context=None)
self.assertEqual(0, resp.code)
self.assertIsNotNone(resp.tx)
def test_relayMessageTxnBySlave(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
walletd._public_stub.PushTransaction = Mock(
return_value=gc_pb2.PushTransactionResp(error_code=gc_pb2.PushTransactionResp.SUBMITTED))
service = WalletAPIService(walletd)
resp = service.AddNewAddressWithSlaves(gcwallet_pb2.AddNewAddressWithSlavesReq(), context=None)
qaddress = resp.address
addr_state = AddressState.get_default(walletd.qaddress_to_address(qaddress))
slaves = walletd.get_slave_list(qaddress)
addr_state.add_slave_pks_access_type(bytes(hstr2bin(slaves[0][0].pk)), 0)
walletd._public_stub.GetAddressState = Mock(
return_value=gc_pb2.GetAddressStateResp(state=addr_state.pbdata))
resp = service.RelayMessageTxnBySlave(
gcwallet_pb2.RelayMessageTxnReq(message=b'Hello gc!',
fee=100000000,
master_address=qaddress), context=None)
self.assertEqual(resp.code, 0)
self.assertIsNotNone(resp.tx)
def test_relayTokenTxn(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
resp = service.AddNewAddress(gcwallet_pb2.AddNewAddressReq(), context=None)
qaddress = resp.address
addr_state = AddressState.get_default(walletd.qaddress_to_address(qaddress))
walletd._public_stub.GetAddressState = Mock(
return_value=gc_pb2.GetAddressStateResp(state=addr_state.pbdata))
walletd._public_stub.IsSlave = Mock(
return_value=gc_pb2.IsSlaveResp(result=True))
walletd._public_stub.GetOTS = Mock(
return_value=gc_pb2.GetOTSResp(next_unused_ots_index=0,
unused_ots_index_found=True))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
qaddresses = [alice_xmss.qaddress, bob_xmss.qaddress]
amounts = [1000000000, 1000000000]
walletd._public_stub.PushTransaction = Mock(
return_value=gc_pb2.PushTransactionResp(error_code=gc_pb2.PushTransactionResp.SUBMITTED))
resp = service.RelayTokenTxn(gcwallet_pb2.RelayTokenTxnReq(symbol=b'gc',
name=b'Quantum Resistant Ledger',
owner=alice_xmss.qaddress,
decimals=5,
addresses=qaddresses,
amounts=amounts,
fee=100000000,
master_address=None,
signer_address=qaddress,
ots_index=0), context=None)
self.assertEqual(resp.code, 0)
self.assertIsNotNone(resp.tx)
def test_relayTokenTxnBySlave(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
walletd._public_stub.PushTransaction = Mock(
return_value=gc_pb2.PushTransactionResp(error_code=gc_pb2.PushTransactionResp.SUBMITTED))
service = WalletAPIService(walletd)
resp = service.AddNewAddressWithSlaves(gcwallet_pb2.AddNewAddressWithSlavesReq(), context=None)
qaddress = resp.address
addr_state = AddressState.get_default(walletd.qaddress_to_address(qaddress))
slaves = walletd.get_slave_list(qaddress)
addr_state.add_slave_pks_access_type(bytes(hstr2bin(slaves[0][0].pk)), 0)
walletd._public_stub.GetAddressState = Mock(
return_value=gc_pb2.GetAddressStateResp(state=addr_state.pbdata))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
qaddresses = [alice_xmss.qaddress, bob_xmss.qaddress]
amounts = [1000000000, 1000000000]
resp = service.RelayTokenTxnBySlave(
gcwallet_pb2.RelayTokenTxnBySlaveReq(symbol=b'gc',
name=b'Quantum Resistant Ledger',
owner=alice_xmss.qaddress,
decimals=5,
addresses=qaddresses,
amounts=amounts,
fee=100000000,
master_address=qaddress), context=None)
self.assertEqual(resp.code, 0)
self.assertIsNotNone(resp.tx)
def test_relayTransferTokenTxn(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
resp = service.AddNewAddress(gcwallet_pb2.AddNewAddressReq(), context=None)
qaddress = resp.address
addr_state = AddressState.get_default(walletd.qaddress_to_address(qaddress))
walletd._public_stub.GetAddressState = Mock(
return_value=gc_pb2.GetAddressStateResp(state=addr_state.pbdata))
walletd._public_stub.IsSlave = Mock(
return_value=gc_pb2.IsSlaveResp(result=True))
walletd._public_stub.GetOTS = Mock(
return_value=gc_pb2.GetOTSResp(next_unused_ots_index=0,
unused_ots_index_found=True))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
qaddresses_to = [alice_xmss.qaddress, bob_xmss.qaddress]
amounts = [1000000000, 1000000000]
walletd._public_stub.PushTransaction = Mock(
return_value=gc_pb2.PushTransactionResp(error_code=gc_pb2.PushTransactionResp.SUBMITTED))
resp = service.RelayTransferTokenTxn(gcwallet_pb2.RelayTransferTokenTxnReq(addresses_to=qaddresses_to,
amounts=amounts,
token_txhash='',
fee=100000000,
master_address=None,
signer_address=qaddress,
ots_index=0), context=None)
self.assertEqual(resp.code, 0)
self.assertIsNotNone(resp.tx)
def test_relayTransferTokenTxnBySlave(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
walletd._public_stub.PushTransaction = Mock(
return_value=gc_pb2.PushTransactionResp(error_code=gc_pb2.PushTransactionResp.SUBMITTED))
service = WalletAPIService(walletd)
resp = service.AddNewAddressWithSlaves(gcwallet_pb2.AddNewAddressWithSlavesReq(), context=None)
qaddress = resp.address
addr_state = AddressState.get_default(walletd.qaddress_to_address(qaddress))
slaves = walletd.get_slave_list(qaddress)
addr_state.add_slave_pks_access_type(bytes(hstr2bin(slaves[0][0].pk)), 0)
walletd._public_stub.GetAddressState = Mock(
return_value=gc_pb2.GetAddressStateResp(state=addr_state.pbdata))
alice_xmss = get_alice_xmss(4)
bob_xmss = get_bob_xmss(4)
qaddresses_to = [alice_xmss.qaddress, bob_xmss.qaddress]
amounts = [1000000000, 1000000000]
resp = service.RelayTransferTokenTxnBySlave(
gcwallet_pb2.RelayTransferTokenTxnBySlaveReq(addresses_to=qaddresses_to,
amounts=amounts,
token_txhash='',
fee=100000000,
master_address=qaddress), context=None)
self.assertEqual(resp.code, 0)
self.assertIsNotNone(resp.tx)
def test_relaySlaveTxn(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
resp = service.AddNewAddress(gcwallet_pb2.AddNewAddressReq(), context=None)
qaddress = resp.address
addr_state = AddressState.get_default(walletd.qaddress_to_address(qaddress))
walletd._public_stub.GetAddressState = Mock(
return_value=gc_pb2.GetAddressStateResp(state=addr_state.pbdata))
walletd._public_stub.IsSlave = Mock(
return_value=gc_pb2.IsSlaveResp(result=True))
walletd._public_stub.GetOTS = Mock(
return_value=gc_pb2.GetOTSResp(next_unused_ots_index=0,
unused_ots_index_found=True))
alice_xmss = get_alice_xmss(4)
slave_pks = [alice_xmss.pk]
access_types = [0]
walletd._public_stub.PushTransaction = Mock(
return_value=gc_pb2.PushTransactionResp(error_code=gc_pb2.PushTransactionResp.SUBMITTED))
resp = service.RelaySlaveTxn(gcwallet_pb2.RelaySlaveTxnReq(slave_pks=slave_pks,
access_types=access_types,
fee=100000000,
master_address=None,
signer_address=qaddress,
ots_index=0), context=None)
self.assertEqual(resp.code, 0)
self.assertIsNotNone(resp.tx)
def test_relaySlaveTxnBySlave(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
walletd._public_stub.PushTransaction = Mock(
return_value=gc_pb2.PushTransactionResp(error_code=gc_pb2.PushTransactionResp.SUBMITTED))
service = WalletAPIService(walletd)
resp = service.AddNewAddressWithSlaves(gcwallet_pb2.AddNewAddressWithSlavesReq(), context=None)
qaddress = resp.address
addr_state = AddressState.get_default(walletd.qaddress_to_address(qaddress))
slaves = walletd.get_slave_list(qaddress)
addr_state.add_slave_pks_access_type(bytes(hstr2bin(slaves[0][0].pk)), 0)
walletd._public_stub.GetAddressState = Mock(
return_value=gc_pb2.GetAddressStateResp(state=addr_state.pbdata))
alice_xmss = get_alice_xmss(4)
slave_pks = [alice_xmss.pk]
access_types = [0]
resp = service.RelaySlaveTxnBySlave(
gcwallet_pb2.RelaySlaveTxnBySlaveReq(slave_pks=slave_pks,
access_types=access_types,
fee=100000000,
master_address=qaddress), context=None)
self.assertEqual(resp.code, 0)
self.assertIsNotNone(resp.tx)
def test_encryptWallet(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
service.AddNewAddress(gcwallet_pb2.AddNewAddressReq(), context=None)
resp = service.EncryptWallet(gcwallet_pb2.EncryptWalletReq(), context=None)
self.assertEqual(resp.code, 1)
resp = service.EncryptWallet(gcwallet_pb2.EncryptWalletReq(passphrase=self.passphrase), context=None)
self.assertEqual(resp.code, 0)
resp = service.EncryptWallet(gcwallet_pb2.EncryptWalletReq(passphrase=self.passphrase), context=None)
self.assertEqual(resp.code, 1)
def test_lockWallet(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
service.AddNewAddress(gcwallet_pb2.AddNewAddressReq(), context=None)
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(len(resp.addresses), 1)
resp = service.EncryptWallet(gcwallet_pb2.EncryptWalletReq(passphrase=self.passphrase), context=None)
self.assertEqual(resp.code, 0)
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(resp.code, 1)
resp = service.UnlockWallet(gcwallet_pb2.UnlockWalletReq(passphrase=self.passphrase), context=None)
self.assertEqual(resp.code, 0)
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(resp.code, 0)
resp = service.LockWallet(gcwallet_pb2.LockWalletReq(), context=None)
self.assertEqual(resp.code, 0)
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(resp.code, 1)
def test_unlockWallet(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
service.AddNewAddress(gcwallet_pb2.AddNewAddressReq(), context=None)
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(len(resp.addresses), 1)
resp = service.EncryptWallet(gcwallet_pb2.EncryptWalletReq(passphrase=self.passphrase), context=None)
self.assertEqual(resp.code, 0)
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(resp.code, 1)
resp = service.UnlockWallet(gcwallet_pb2.UnlockWalletReq(passphrase=self.passphrase), context=None)
self.assertEqual(resp.code, 0)
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(resp.code, 0)
resp = service.LockWallet(gcwallet_pb2.LockWalletReq(), context=None)
self.assertEqual(resp.code, 0)
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(resp.code, 1)
resp = service.UnlockWallet(gcwallet_pb2.UnlockWalletReq(), context=None)
self.assertEqual(resp.code, 1)
resp = service.UnlockWallet(gcwallet_pb2.UnlockWalletReq(passphrase="wrong"), context=None)
self.assertEqual(resp.code, 1)
def test_changePassphrase(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
service.AddNewAddress(gcwallet_pb2.AddNewAddressReq(), context=None)
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(len(resp.addresses), 1)
resp = service.EncryptWallet(gcwallet_pb2.EncryptWalletReq(passphrase=self.passphrase), context=None)
self.assertEqual(resp.code, 0)
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(resp.code, 1)
resp = service.UnlockWallet(gcwallet_pb2.UnlockWalletReq(passphrase=self.passphrase), context=None)
self.assertEqual(resp.code, 0)
resp = service.ListAddresses(gcwallet_pb2.ListAddressesReq(), context=None)
self.assertEqual(resp.code, 0)
resp = service.LockWallet(gcwallet_pb2.LockWalletReq(), context=None)
self.assertEqual(resp.code, 0)
new_passphrase = "Hello World"
resp = service.ChangePassphrase(
gcwallet_pb2.ChangePassphraseReq(oldPassphrase=self.passphrase,
newPassphrase=new_passphrase), context=None)
self.assertEqual(resp.code, 0)
resp = service.UnlockWallet(gcwallet_pb2.UnlockWalletReq(passphrase=self.passphrase), context=None)
self.assertEqual(resp.code, 1)
resp = service.UnlockWallet(gcwallet_pb2.UnlockWalletReq(passphrase=new_passphrase), context=None)
self.assertEqual(resp.code, 0)
def test_getTransactionsByAddress(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
walletd._public_stub.GetMiniTransactionsByAddress = Mock(
return_value=gc_pb2.GetMiniTransactionsByAddressResp(mini_transactions=[],
balance=0))
resp = service.GetTransactionsByAddress(
gcwallet_pb2.TransactionsByAddressReq(address=get_alice_xmss(4).qaddress), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(len(resp.mini_transactions), 0)
self.assertEqual(resp.balance, 0)
def test_getTransaction(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
tx = gc_pb2.Transaction()
tx.fee = 10
tx.transaction_hash = b'1234'
tx.message.message_hash = b'hello'
pk = '01020016ecb9f39b9f4275d5a49e232346a15ae2fa8c50a2927daeac189b8c5f2d1' \
'8bc4e3983bd564298c49ae2e7fa6e28d4b954d8cd59398f1225b08d6144854aee0e'
tx.public_key = bytes(hstr2bin(pk))
walletd._public_stub.GetTransaction = Mock(
return_value=gc_pb2.GetTransactionResp(tx=tx, confirmations=10))
resp = service.GetTransaction(gcwallet_pb2.TransactionReq(tx_hash=tx.transaction_hash), context=None)
self.assertEqual(resp.code, 0)
self.assertIsNotNone(resp.tx)
self.assertEqual(resp.tx.transaction_hash, bin2hstr(tx.transaction_hash))
self.assertEqual(resp.confirmations, "10")
def test_getBalance(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
walletd._public_stub.GetBalance = Mock(
return_value=gc_pb2.GetBalanceResp(balance=1000))
resp = service.GetBalance(gcwallet_pb2.BalanceReq(address=self.qaddress), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(resp.balance, "1000")
def test_getTotalBalance(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
walletd._public_stub.GetTotalBalance = Mock(
return_value=gc_pb2.GetTotalBalanceResp(balance=6000))
resp = service.GetTotalBalance(gcwallet_pb2.TotalBalanceReq(), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(resp.balance, "6000")
def test_getOTS(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
ots_bitfield_by_page = gc_pb2.OTSBitfieldByPage(ots_bitfield=[b'\x00'] * 10,
page_number=1)
walletd._public_stub.GetOTS = Mock(
return_value=gc_pb2.GetOTSResp(ots_bitfield_by_page=[ots_bitfield_by_page],
next_unused_ots_index=1,
unused_ots_index_found=True))
resp = service.GetOTS(gcwallet_pb2.OTSReq(address=self.qaddress), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(len(resp.ots_bitfield_by_page), 1)
self.assertEqual(resp.ots_bitfield_by_page[0], ots_bitfield_by_page)
self.assertEqual(resp.next_unused_ots_index, 1)
def test_getHeight(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
walletd._public_stub.GetHeight = Mock(
return_value=gc_pb2.GetHeightResp(height=1001))
resp = service.GetHeight(gcwallet_pb2.HeightReq(), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(resp.height, 1001)
def test_getBlock(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
block = gc_pb2.Block()
block.header.hash_header = b'001122'
block.header.block_number = 1
walletd._public_stub.GetBlock = Mock(
return_value=gc_pb2.GetBlockResp(block=block))
resp = service.GetBlock(gcwallet_pb2.BlockReq(header_hash=b'001122'), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(resp.block.header.hash_header, bin2hstr(block.header.hash_header))
self.assertEqual(resp.block.header.block_number, block.header.block_number)
def test_getBlockByNumber(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
block = gc_pb2.Block()
block.header.hash_header = b'001122'
block.header.block_number = 1
walletd._public_stub.GetBlockByNumber = Mock(
return_value=gc_pb2.GetBlockResp(block=block))
resp = service.GetBlockByNumber(gcwallet_pb2.BlockByNumberReq(block_number=1), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(resp.block.header.hash_header, bin2hstr(block.header.hash_header))
self.assertEqual(resp.block.header.block_number, block.header.block_number)
def test_getAddressFromPK(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
pk = '01020016ecb9f39b9f4275d5a49e232346a15ae2fa8c50a2927daeac189b8c5f2d1' \
'8bc4e3983bd564298c49ae2e7fa6e28d4b954d8cd59398f1225b08d6144854aee0e'
resp = service.GetAddressFromPK(gcwallet_pb2.AddressFromPKReq(pk=pk), context=None)
self.assertEqual(resp.code, 0)
self.assertEqual(resp.address,
'Q010200670246b0026436b717f199e3ec5320ba6ab61d5eddff811ac199a9e9b871d3280178b343')
def test_getNodeInfo(self):
with set_gc_dir("wallet_ver1"):
walletd = WalletD()
service = WalletAPIService(walletd)
block_last_hash_str = 'c23f47a10a8c53cc5ded096369255a32c4a218682a961d0ee7db22c500000000'
version = "1.0.0"
num_connections = 10
num_known_peers = 200
uptime = 10000
block_height = 102345
block_last_hash = bytes(hstr2bin(block_last_hash_str))
network_id = "network id"
node_info = gc_pb2.NodeInfo(version=version,
num_connections=num_connections,
num_known_peers=num_known_peers,
uptime=uptime,
block_height=block_height,
block_last_hash=block_last_hash,
network_id=network_id)
walletd._public_stub.GetNodeState = Mock(
return_value=gc_pb2.GetNodeStateResp(info=node_info))
resp = service.GetNodeInfo(gcwallet_pb2.NodeInfoReq(), context=None)
self.assertEqual(resp.version, version)
self.assertEqual(resp.num_connections, str(num_connections))
self.assertEqual(resp.num_known_peers, str(num_known_peers))
self.assertEqual(resp.uptime, uptime)
self.assertEqual(resp.block_height, block_height)
self.assertEqual(resp.block_last_hash, block_last_hash_str)
self.assertEqual(resp.network_id, network_id)
| 47.709333
| 127
| 0.598709
|
397b426bfe38ca51e88d903025ca8888da5e7753
| 474
|
py
|
Python
|
pos_product_available/models/models.py
|
nahualventure/pos-addons
|
3c911c28c259967fb74e311ddcc8e6ca032c005d
|
[
"MIT"
] | null | null | null |
pos_product_available/models/models.py
|
nahualventure/pos-addons
|
3c911c28c259967fb74e311ddcc8e6ca032c005d
|
[
"MIT"
] | 1
|
2020-10-26T17:22:38.000Z
|
2020-10-26T18:58:28.000Z
|
pos_product_available/models/models.py
|
nahualventure/pos-addons
|
3c911c28c259967fb74e311ddcc8e6ca032c005d
|
[
"MIT"
] | 1
|
2021-09-05T19:44:25.000Z
|
2021-09-05T19:44:25.000Z
|
# Copyright 2019 Kolushov Alexandr <https://it-projects.info/team/KolushovAlexandr>
# License MIT (https://opensource.org/licenses/MIT).
from odoo import fields, models
class PosConfig(models.Model):
_inherit = "pos.config"
show_qtys = fields.Boolean(
"Show Product Qtys", help="Show Product Qtys in POS", default=True
)
default_location_src_id = fields.Many2one(
"stock.location", related="picking_type_id.default_location_src_id"
)
| 29.625
| 83
| 0.721519
|
a01bfae79bf24be9d48eb0c8c9d981073144e56b
| 1,131
|
py
|
Python
|
profiles_api/serializers.py
|
Sparrow-veer/profiles-rest-api
|
7194c412f80ec7cbc88a37e10225320f25c13caf
|
[
"MIT"
] | null | null | null |
profiles_api/serializers.py
|
Sparrow-veer/profiles-rest-api
|
7194c412f80ec7cbc88a37e10225320f25c13caf
|
[
"MIT"
] | null | null | null |
profiles_api/serializers.py
|
Sparrow-veer/profiles-rest-api
|
7194c412f80ec7cbc88a37e10225320f25c13caf
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from profiles_api import models
class HelloSerializer(serializers.Serializer):
"""Serializes a name field for testing our APIView"""
    name = serializers.CharField(max_length=10)
class UserProfileSerializer(serializers.ModelSerializer):
    """Serializes a user profile object"""
    class Meta:
        model = models.UserProfile
        fields = ('id', 'email', 'name', 'password')
        extra_kwargs = {
            'password': {
                'write_only': True,
                'style': {'input_type': 'password'}
            }
        }
    def create(self, validated_data):
        """Create and return a new user"""
        user = models.UserProfile.objects.create_user(
            email=validated_data['email'],
            name=validated_data['name'],
            password=validated_data['password']
        )
        return user
def update(self, instance, validated_data):
"""Handle updating user account"""
if 'password' in validated_data:
password = validated_data.pop('password')
instance.set_password(password)
return super().update(instance, validated_data)
| 27.585366
| 58
| 0.648099
|
0fa3aeb8ff8015c3b08bffd0b1c827ce31b2eaa3
| 580
|
py
|
Python
|
O4/_19_imputing_missing_values/doc.py
|
ShAlireza/ML-Tries
|
4516be7a3275c9bdedd7bd258800be384b6b34f0
|
[
"MIT"
] | null | null | null |
O4/_19_imputing_missing_values/doc.py
|
ShAlireza/ML-Tries
|
4516be7a3275c9bdedd7bd258800be384b6b34f0
|
[
"MIT"
] | null | null | null |
O4/_19_imputing_missing_values/doc.py
|
ShAlireza/ML-Tries
|
4516be7a3275c9bdedd7bd258800be384b6b34f0
|
[
"MIT"
] | null | null | null |
"""
Often, the removal of training examples or dropping of entire feature
columns is simply not feasible, because we might lose too much valuable
data. In this case, we can use different interpolation techniques to
estimate the missing values from the other training examples in our
dataset. One of the most common interpolation techniques is mean
imputation, where we simply replace the missing value with the mean value
of the entire feature column. A convenient way to achieve this is by using
the SimpleImputer class from scikit-learn.
"""
| 38.666667
| 78
| 0.760345
|
34a7309fb52b7640b948a53aab5e6c7e69577b1f
| 8,196
|
py
|
Python
|
AutomatedTesting/Gem/PythonTests/Physics/tests/collider/Collider_SameCollisionGroupSameLayerCollide.py
|
BreakerOfThings/o3de
|
f4c59f868c726470ec910623facd836047d059c3
|
[
"Apache-2.0",
"MIT"
] | 11
|
2021-07-08T09:58:26.000Z
|
2022-03-17T17:59:26.000Z
|
AutomatedTesting/Gem/PythonTests/Physics/tests/collider/Collider_SameCollisionGroupSameLayerCollide.py
|
RoddieKieley/o3de
|
e804fd2a4241b039a42d9fa54eaae17dc94a7a92
|
[
"Apache-2.0",
"MIT"
] | 29
|
2021-07-06T19:33:52.000Z
|
2022-03-22T10:27:49.000Z
|
AutomatedTesting/Gem/PythonTests/Physics/tests/collider/Collider_SameCollisionGroupSameLayerCollide.py
|
RoddieKieley/o3de
|
e804fd2a4241b039a42d9fa54eaae17dc94a7a92
|
[
"Apache-2.0",
"MIT"
] | 4
|
2021-07-06T19:24:43.000Z
|
2022-03-31T12:42:27.000Z
|
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
# Test case ID : C4976242
# Test Case Title : Assign same collision layer and same collision group to two entities and
# verify that they collide or not
# fmt: off
class Tests():
enter_game_mode = ("Entered game mode", "Failed to enter game mode")
find_moving = ("Moving entity found", "Moving entity not found")
find_stationary = ("Stationary entity found", "Stationary entity not found")
find_terrain = ("Terrain entity found", "Terrain entity not found")
stationary_above_terrain = ("Stationary is above terrain", "Stationary is not above terrain")
moving_above_stationary = ("Moving is above stationary", "Moving is not above stationary")
gravity_works = ("Moving Sphere fell down", "Moving Sphere did not fall")
collisions = ("Collision occurred in between entities", "Collision did not occur between entities")
falls_below_terrain_height = ("Moving is below terrain", "Moving did not fall below terrain before timeout")
exit_game_mode = ("Exited game mode", "Couldn't exit game mode")
# fmt: on
def Collider_SameCollisionGroupSameLayerCollide():
"""
Summary:
    Open a project that already has two entities with the same collision layer and the same collision group, and verify that they collide
Level Description:
    The Moving and Stationary entities are created in the level with the same collision layer and the same collision group.
    The Moving entity is placed above the Stationary entity. The Terrain is placed below the Stationary entity.
    The Moving and Stationary entities therefore collide with each other, then pass through the terrain after the collision.
Expected Behavior:
    The Moving and Stationary entities should collide with each other. After the collision, they pass through the terrain.
Test Steps:
1) Open level and Enter game mode
2) Retrieve and validate Entities
    3) Get the starting z position of the Moving entity, the Stationary entity and the Terrain
    4) Check and report that the entities are at the correct heights before collision
    5) Check that gravity works and the Moving entity falls down
    6) Check that the spheres collide only with each other, not with the terrain
    7) Check that the Moving entity is below the terrain after the collision
8) Exit game mode
9) Close the editor
Note:
- This test file must be called from the Open 3D Engine Editor command terminal
- Any passed and failed tests are written to the Editor.log file.
    Parsing the file or running a log_monitor is required to observe the test results.
:return: None
"""
import os
import sys
from editor_python_test_tools.utils import Report
from editor_python_test_tools.utils import TestHelper as helper
import azlmbr.legacy.general as general
import azlmbr.bus
# Constants
TIMEOUT = 2.0
TERRAIN_HEIGHT = 32.0 # Default height of the terrain
MIN_BELOW_TERRAIN = 0.5 # Minimum height below terrain the sphere must be in order to be 'under' it
CLOSE_ENOUGH_THRESHOLD = 0.0001
helper.init_idle()
# 1) Open level and Enter game mode
helper.open_level("Physics", "Collider_SameCollisionGroupSameLayerCollide")
helper.enter_game_mode(Tests.enter_game_mode)
# 2) Retrieve and validate Entities
moving_id = general.find_game_entity("Sphere_Moving")
Report.critical_result(Tests.find_moving, moving_id.IsValid())
stationary_id = general.find_game_entity("Sphere_Stationary")
Report.critical_result(Tests.find_stationary, stationary_id.IsValid())
terrain_id = general.find_game_entity("Terrain")
Report.critical_result(Tests.find_terrain, terrain_id.IsValid())
    # 3) Get the starting z position of the Moving entity, the Stationary entity and the Terrain
class Sphere:
"""
Class to hold values for test checks.
Attributes:
start_position_z: The initial z position of the sphere
position_z : The z position of the sphere
fell : When the sphere falls any distance below its original position, the value should be set True
            below_terrain : When the sphere falls below the specified terrain height, the value should be set True
"""
start_position_z = None
position_z = None
fell = False
below_terrain = False
Sphere.start_position_z = azlmbr.components.TransformBus(azlmbr.bus.Event, "GetWorldZ", moving_id)
stationary_start_z = azlmbr.components.TransformBus(azlmbr.bus.Event, "GetWorldZ", stationary_id)
    # 4) Check and report that the entities are at the correct heights before collision
Report.info(
"Terrain Height: {} \n Stationary Sphere height: {} \n Moving Sphere height: {}".format(
TERRAIN_HEIGHT, stationary_start_z, Sphere.start_position_z
)
)
Report.result(Tests.stationary_above_terrain, TERRAIN_HEIGHT < (stationary_start_z - CLOSE_ENOUGH_THRESHOLD))
Report.result(
Tests.moving_above_stationary, stationary_start_z < (Sphere.start_position_z - CLOSE_ENOUGH_THRESHOLD)
)
    # 5) Check that gravity works and the Moving entity falls down
def sphere_fell():
if not Sphere.fell:
Sphere.position_z = azlmbr.components.TransformBus(azlmbr.bus.Event, "GetWorldZ", moving_id)
if Sphere.position_z < (Sphere.start_position_z - CLOSE_ENOUGH_THRESHOLD):
Report.info("Sphere position is now lower than the starting position")
Sphere.fell = True
return Sphere.fell
helper.wait_for_condition(sphere_fell, TIMEOUT)
Report.result(Tests.gravity_works, Sphere.fell)
    # 6) Check that the spheres collide only with each other, not with the terrain
class Collision:
entity_collision = False
terrain_collision = False
class CollisionHandler:
def __init__(self, id, func):
self.id = id
self.func = func
self.create_collision_handler()
def on_collision_begin(self, args):
self.func(args[0])
def create_collision_handler(self):
self.handler = azlmbr.physics.CollisionNotificationBusHandler()
self.handler.connect(self.id)
self.handler.add_callback("OnCollisionBegin", self.on_collision_begin)
def on_collision_terrain(other_id):
Collision.terrain_collision = True
        Report.info("Collision occurred between the Moving or Stationary entity and the Terrain")
def on_moving_entity_collision(other_id):
if other_id.Equal(stationary_id):
Collision.entity_collision = True
# collision handler for entities
CollisionHandler(terrain_id, on_collision_terrain)
CollisionHandler(moving_id, on_moving_entity_collision)
# wait till timeout to check for any collisions happening in the level
helper.wait_for_condition(lambda: Collision.entity_collision, TIMEOUT)
Report.result(Tests.collisions, Collision.entity_collision and not Collision.terrain_collision)
    # 7) Check that the Moving entity is below the terrain after the collision
def sphere_below_terrain():
if not Sphere.below_terrain:
Sphere.position_z = azlmbr.components.TransformBus(azlmbr.bus.Event, "GetWorldZ", moving_id)
if Sphere.position_z < (TERRAIN_HEIGHT - MIN_BELOW_TERRAIN):
Sphere.below_terrain = True
return Sphere.below_terrain
sphere_under_terrain = helper.wait_for_condition(sphere_below_terrain, TIMEOUT)
Report.result(Tests.falls_below_terrain_height, sphere_under_terrain)
# 8) Exit game mode
helper.exit_game_mode(Tests.exit_game_mode)
if __name__ == "__main__":
from editor_python_test_tools.utils import Report
Report.start_test(Collider_SameCollisionGroupSameLayerCollide)
| 43.595745
| 127
| 0.702294
|
73bf82c5b2e361e5008140cb1d6cbfb1b426f38a
| 724
|
py
|
Python
|
main.py
|
marvkey/Simple-Clock
|
1268c33f8390ba075ad5fefc440b874138ab4885
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
marvkey/Simple-Clock
|
1268c33f8390ba075ad5fefc440b874138ab4885
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
marvkey/Simple-Clock
|
1268c33f8390ba075ad5fefc440b874138ab4885
|
[
"Apache-2.0"
] | null | null | null |
import sys, pygame
import core,timer,AlarmClock
background_colour = (255,255,255)
(width, height) = (300, 200)
screen = pygame.display.set_mode((width, height))
pygame.display.set_caption('clock app')
screen.fill(background_colour)
pygame.display.flip()
Gui = AlarmClock.AlarmClock()
def main():
    # Use a flag so the loop can end cleanly and pygame.quit() is reachable
    running = True
    while running:
        Gui.onupdate()
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                running = False
        pygame.display.update()
        for temp in core.allButton:
            temp.Render(screen)
        for alarmClock in AlarmClock.allAlarm:
            alarmClock.CheckAlarm()
    pygame.quit()
if __name__ == "__main__":
main()
| 23.354839
| 49
| 0.624309
|
aee62705fb50e34fda6ce0d84d9ed10c6c8fb243
| 1,867
|
py
|
Python
|
examples/models/file/transform_interpolator.py
|
goncaloperes/bokeh
|
b857d2d17d7c19779bb0a7be2601d8238fb1d5e9
|
[
"BSD-3-Clause"
] | 1
|
2021-10-30T00:32:00.000Z
|
2021-10-30T00:32:00.000Z
|
examples/models/file/transform_interpolator.py
|
Deng-Fankang/bokeh
|
894731860c53b7c9ddd0057dee85cf064278dc0e
|
[
"BSD-3-Clause"
] | 12
|
2020-08-26T20:19:29.000Z
|
2020-08-26T20:19:52.000Z
|
examples/models/file/transform_interpolator.py
|
Deng-Fankang/bokeh
|
894731860c53b7c9ddd0057dee85cf064278dc0e
|
[
"BSD-3-Clause"
] | 2
|
2021-01-12T18:22:24.000Z
|
2021-10-30T00:32:02.000Z
|
from bokeh.models import (Column, ColumnDataSource, CustomJS,
LinearInterpolator, Select, StepInterpolator,)
from bokeh.plotting import figure, output_file, show
output_file("transform_interpolator.html", title="Example Transforms")
N = 600
controls = ColumnDataSource(data=dict(x=[1, 2, 3, 4, 5], y=[2, 8, 7, 3, 5]))
source = ColumnDataSource(data=dict(x=[], y=[]))
linear = LinearInterpolator(x='x', y='y', data=controls)
step = StepInterpolator(x='x', y='y', data=controls)
p = figure(x_range=(0, 6), y_range=(0, 10))
p.circle(x='x', y='y', source=controls, size=15, alpha=0.5, color="firebrick")
p.circle(x='x', y='y', source=source, size=1, alpha=0.2, color="navy")
callback = CustomJS(args=dict(source=source, linear=linear, step=step, N=N), code="""
var mode = cb_obj.value;
var data = source.data;
var dx = 6 / N;
if (mode == 'None') {
data['x'] = [];
data['y'] = [];
} else {
var interp;
switch (mode) {
case 'Linear':
interp = linear;
break;
case 'Step (before)':
interp = step;
step.mode = 'before';
break;
case 'Step (center)':
interp = step;
step.mode = 'center';
break;
case 'Step (after)':
interp = step;
step.mode = 'after';
break;
}
for (var i = 0; i < N; i++) {
data['x'][i] = i * dx
data['y'][i] = interp.compute(data['x'][i])
}
}
source.change.emit()
""")
mode = Select(
title='Interpolation Mode',
value='None',
options=['None', 'Linear', 'Step (before)', 'Step (center)', 'Step (after)'],
width=300)
mode.js_on_change('value', callback)
show(Column(mode, p))
| 28.723077
| 85
| 0.521157
|
0a4ff3f5ee4324cf18ce212d62f3d2789c81f368
| 2,083
|
py
|
Python
|
OGFrogClock.py
|
salt-jp/Portfolio
|
87026d7b5c8c1393e114e8b701e2f07305cdddaf
|
[
"Apache-2.0"
] | null | null | null |
OGFrogClock.py
|
salt-jp/Portfolio
|
87026d7b5c8c1393e114e8b701e2f07305cdddaf
|
[
"Apache-2.0"
] | null | null | null |
OGFrogClock.py
|
salt-jp/Portfolio
|
87026d7b5c8c1393e114e8b701e2f07305cdddaf
|
[
"Apache-2.0"
] | null | null | null |
let sec;
let min;
let hr;
let mill;
let x = 10;
let speed = 2;
//blink check
//let check = 30;
function setup() {
createCanvas(windowWidth, windowHeight);
background(0,50,100);
}
function draw() {
fill(0,100,50);
ellipse(510 + 400, 850, 700, 200);
fill(1, 255, 100);
noStroke();
//head
ellipse(510 + 400, 450, 600 - 175, 500-175);
ellipse(370 + 390, 300, 300 - 175, 300-175);
ellipse(670 + 390, 300, 300 - 175, 300-175);
ellipse(510 + 400, 700, 700 - 175, 600-175);
//feet
arc(760, 875, 280, 180, PI, PI + HALF_PI);
arc(1060, 875, 280, 180, PI + HALF_PI, TWO_PI);
//stomach
fill('Yellow')
ellipse(510 + 400, 725, 700 - 250, 600-250);
//white eyes
fill(255);
ellipse(370 + 390, 300, 200 - 100, 200-100);
ellipse(670 + 390, 300, 200 - 100, 200-100);
//pupils
fill(0);
if(sec % 2 != 0) {
//if seconds is odd eyes are on the left
ellipse(355 + 390, 300, 100-45, 100-45);
ellipse(655 + 390, 300, 100-45, 100-45);
}
if(sec % 2 == 0) {
//if seconds are even eyes are on the right
ellipse(385 + 390, 300, 100-45, 100-45);
ellipse(685+390, 300, 100-45, 100-45);
}
//blink?
//if(min == check) {
//green color
// fill(1, 255, 100);
//white eyes
// ellipse(370 + 390, 300, 200 - 100, 200 - 100);
// ellipse(670 +390, 300, 200 - 100, 200 - 100);
//if seconds are even eyes are on the right
// ellipse(375 + 390, 300, 100-45, 100-45);
// ellipse(675+390, 300, 100-45, 100-45);
//if seconds is odd eyes are on the left
// ellipse(370 + 390, 300, 100-45, 100-45);
// ellipse(670 + 390, 300, 100-45, 100-45);
//// }
//question
//fill(0);
//nostrils
ellipse(480 + 400, 470, 20, 20);
ellipse(540 + 400, 470, 20, 20);
sec = second();
min = minute();
hr = hour();
  //text
  fill(255, 153, 51);
  textSize(185);
  text(hr, 805, 775);
  fill(255, 153, 51);
  textSize(90);
  text(min, 860, 850);
//}
// ellipse(x, height/2, 20, 20);
//x = x + speed;
//if(x > width - 10 || x < 10){
// speed = -speed;
//}
}
| 16.273438
| 50
| 0.551128
|
dd0d1e45a6b03586912ed2af58c3ca9521a86418
| 618
|
py
|
Python
|
migrations/versions/4b01613bfbed_.py
|
J4LP/J4OAuth
|
ca757958f1e7069f08e0dae3becd70b90507c871
|
[
"MIT"
] | 1
|
2015-12-15T03:17:15.000Z
|
2015-12-15T03:17:15.000Z
|
migrations/versions/4b01613bfbed_.py
|
J4LP/J4OAuth
|
ca757958f1e7069f08e0dae3becd70b90507c871
|
[
"MIT"
] | null | null | null |
migrations/versions/4b01613bfbed_.py
|
J4LP/J4OAuth
|
ca757958f1e7069f08e0dae3becd70b90507c871
|
[
"MIT"
] | 1
|
2015-12-15T03:17:19.000Z
|
2015-12-15T03:17:19.000Z
|
"""empty message
Revision ID: 4b01613bfbed
Revises: 406cccb640c3
Create Date: 2014-02-07 00:29:22.150808
"""
# revision identifiers, used by Alembic.
revision = '4b01613bfbed'
down_revision = '406cccb640c3'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('client', sa.Column('homepage', sa.String(length=255), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('client', 'homepage')
### end Alembic commands ###
| 22.888889
| 88
| 0.697411
|
88bdfd71cc2fd839387d5e496eba73dfd06b0b0a
| 2,674
|
py
|
Python
|
venv/lib/python3.6/site-packages/flask_script/cli.py
|
aitoehigie/britecore_flask
|
eef1873dbe6b2cc21f770bc6dec783007ae4493b
|
[
"MIT"
] | null | null | null |
venv/lib/python3.6/site-packages/flask_script/cli.py
|
aitoehigie/britecore_flask
|
eef1873dbe6b2cc21f770bc6dec783007ae4493b
|
[
"MIT"
] | 1
|
2021-06-01T23:32:38.000Z
|
2021-06-01T23:32:38.000Z
|
venv/lib/python3.6/site-packages/flask_script/cli.py
|
aitoehigie/britecore_flask
|
eef1873dbe6b2cc21f770bc6dec783007ae4493b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import getpass
from ._compat import string_types, input
def prompt(name, default=None):
"""
Grab user input from command line.
:param name: prompt text
:param default: default value if no input provided.
"""
prompt = name + (default and " [%s]" % default or "")
prompt += name.endswith("?") and " " or ": "
while True:
rv = input(prompt)
if rv:
return rv
if default is not None:
return default
def prompt_pass(name, default=None):
"""
Grabs hidden (password) input from command line.
:param name: prompt text
:param default: default value if no input provided.
"""
prompt = name + (default and " [%s]" % default or "")
prompt += name.endswith("?") and " " or ": "
while True:
rv = getpass.getpass(prompt)
if rv:
return rv
if default is not None:
return default
def prompt_bool(name, default=False, yes_choices=None, no_choices=None):
"""
Grabs user input from command line and converts to boolean
value.
:param name: prompt text
:param default: default value if no input provided.
:param yes_choices: default 'y', 'yes', '1', 'on', 'true', 't'
:param no_choices: default 'n', 'no', '0', 'off', 'false', 'f'
"""
yes_choices = yes_choices or ("y", "yes", "1", "on", "true", "t")
no_choices = no_choices or ("n", "no", "0", "off", "false", "f")
while True:
rv = prompt(name, default and yes_choices[0] or no_choices[0])
if not rv:
return default
if rv.lower() in yes_choices:
return True
elif rv.lower() in no_choices:
return False
def prompt_choices(name, choices, default=None, no_choice=("none",)):
"""
Grabs user input from command line from set of provided choices.
:param name: prompt text
:param choices: list or tuple of available choices. Choices may be
single strings or (key, value) tuples.
:param default: default value if no input provided.
:param no_choice: acceptable list of strings for "null choice"
"""
_choices = []
options = []
for choice in choices:
if isinstance(choice, string_types):
options.append(choice)
else:
options.append("%s [%s]" % (choice[1], choice[0]))
choice = choice[0]
_choices.append(choice)
while True:
rv = prompt(name + " - (%s)" % ", ".join(options), default).lower()
if rv in no_choice:
return None
if rv in _choices or rv == default:
return rv
| 28.147368
| 75
| 0.57816
|
30a1bfc29f137fac17649807e035cb117fcbb4d6
| 14,381
|
py
|
Python
|
soxs/background/point_sources.py
|
granttremblay/soxs
|
30c96ae74153cc974637b6bfb478bd56743cb90b
|
[
"BSD-3-Clause"
] | 4
|
2017-07-05T14:27:08.000Z
|
2021-04-24T16:45:08.000Z
|
soxs/background/point_sources.py
|
granttremblay/soxs
|
30c96ae74153cc974637b6bfb478bd56743cb90b
|
[
"BSD-3-Clause"
] | 7
|
2016-12-13T20:57:23.000Z
|
2020-03-20T16:55:08.000Z
|
soxs/background/point_sources.py
|
granttremblay/soxs
|
30c96ae74153cc974637b6bfb478bd56743cb90b
|
[
"BSD-3-Clause"
] | 2
|
2017-03-31T18:30:01.000Z
|
2017-08-11T20:23:56.000Z
|
import numpy as np
from soxs import write_photon_list
from soxs.constants import keV_per_erg, erg_per_keV
from soxs.spectra import get_wabs_absorb, get_tbabs_absorb
from soxs.utils import mylog, parse_prng, parse_value
from scipy.interpolate import InterpolatedUnivariateSpline
from scipy.special import erf
from astropy.table import Table
from astropy.io import ascii
# Function for computing spectral index of AGN sources
# a fit to the data from Figure 13a of Hickox & Markevitch 2006
# http://adsabs.harvard.edu/abs/2006ApJ...645...95H
# Parameters
aa = -14.0
bb = 0.5
cc = 0.5
dd = 1.8
# Here x = log10(flux)
def get_agn_index(x):
y = (x-aa)/bb
return cc*erf(y)+dd
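# Worked values (comment added for clarity): erf(0) = 0, so at the pivot
# flux log10(S) = aa = -14 the index is exactly dd = 1.8; for much
# brighter sources erf -> +1 and the index approaches dd + cc = 2.3,
# while for much fainter sources erf -> -1 and it approaches dd - cc = 1.3.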
# Index for galaxies
gal_index = 2.0
fb_emin = 0.5 # keV, low energy bound for the logN-logS flux band
fb_emax = 2.0 # keV, high energy bound for the logN-logS flux band
spec_emin = 0.1 # keV, minimum energy of mock spectrum
spec_emax = 10.0 # keV, max energy of mock spectrum
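# Note (comment added for clarity): for a power-law photon spectrum
# N(E) ~ E**-ind, f_g below is the integral of E**-ind over the mock band
# [spec_emin, spec_emax] (photon counts) and f_E is the integral of
# E**(1-ind) over the logN-logS band [fb_emin, fb_emax] (energy), with the
# logarithmic limits at ind == 1 and ind == 2. Their ratio converts an
# energy flux measured in the flux band into a photon-flux normalization
# for the full spectrum.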
def get_flux_scale(ind, fb_emin, fb_emax, spec_emin, spec_emax):
f_g = np.log(spec_emax/spec_emin)*np.ones(ind.size)
f_E = np.log(fb_emax/fb_emin)*np.ones(ind.size)
n1 = ind != 1.0
n2 = ind != 2.0
f_g[n1] = (spec_emax**(1.0-ind[n1])-spec_emin**(1.0-ind[n1]))/(1.0-ind[n1])
f_E[n2] = (fb_emax**(2.0-ind[n2])-fb_emin**(2.0-ind[n2]))/(2.0-ind[n2])
fscale = f_g/f_E
return fscale
def generate_fluxes(exp_time, area, fov, prng):
from soxs.data import cdf_fluxes, cdf_gal, cdf_agn
exp_time = parse_value(exp_time, "s")
area = parse_value(area, "cm**2")
fov = parse_value(fov, "arcmin")
logf = np.log10(cdf_fluxes)
n_gal = np.rint(cdf_gal[-1])
n_agn = np.rint(cdf_agn[-1])
F_gal = cdf_gal / cdf_gal[-1]
F_agn = cdf_agn / cdf_agn[-1]
f_gal = InterpolatedUnivariateSpline(F_gal, logf)
f_agn = InterpolatedUnivariateSpline(F_agn, logf)
eph_mean_erg = 1.0*erg_per_keV
S_min_obs = eph_mean_erg/(exp_time*area)
mylog.debug("Flux of %g erg/cm^2/s gives roughly "
"one photon during exposure." % S_min_obs)
fov_area = fov**2
n_gal = int(n_gal*fov_area/3600.0)
n_agn = int(n_agn*fov_area/3600.0)
mylog.debug("%d AGN, %d galaxies in the FOV." % (n_agn, n_gal))
randvec1 = prng.uniform(size=n_agn)
agn_fluxes = 10**f_agn(randvec1)
randvec2 = prng.uniform(size=n_gal)
gal_fluxes = 10**f_gal(randvec2)
return agn_fluxes, gal_fluxes
def generate_sources(exp_time, fov, sky_center, area=40000.0, prng=None):
r"""
Make a catalog of point sources.
Parameters
----------
exp_time : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`
The exposure time of the observation in seconds.
fov : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`
The field of view in arcminutes.
sky_center : array-like
The center RA, Dec of the field of view in degrees.
area : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`, optional
The effective area in cm**2. It must be large enough
so that a sufficiently large sample is drawn for the
ARF. Default: 40000.
prng : :class:`~numpy.random.RandomState` object, integer, or None
A pseudo-random number generator. Typically will only
be specified if you have a reason to generate the same
set of random numbers, such as for a test. Default is None,
which sets the seed based on the system time.
"""
prng = parse_prng(prng)
exp_time = parse_value(exp_time, "s")
fov = parse_value(fov, "arcmin")
area = parse_value(area, "cm**2")
agn_fluxes, gal_fluxes = generate_fluxes(exp_time, area, fov, prng)
fluxes = np.concatenate([agn_fluxes, gal_fluxes])
ind = np.concatenate([get_agn_index(np.log10(agn_fluxes)),
gal_index * np.ones(gal_fluxes.size)])
dec_scal = np.fabs(np.cos(sky_center[1] * np.pi / 180))
ra_min = sky_center[0] - fov / (2.0 * 60.0 * dec_scal)
dec_min = sky_center[1] - fov / (2.0 * 60.0)
ra0 = prng.uniform(size=fluxes.size) * fov / (60.0 * dec_scal) + ra_min
dec0 = prng.uniform(size=fluxes.size) * fov / 60.0 + dec_min
return ra0, dec0, fluxes, ind
def make_ptsrc_background(exp_time, fov, sky_center, absorb_model="wabs",
nH=0.05, area=40000.0, input_sources=None,
output_sources=None, prng=None):
r"""
Make a point-source background.
Parameters
----------
exp_time : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`
The exposure time of the observation in seconds.
fov : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`
The field of view in arcminutes.
sky_center : array-like
The center RA, Dec of the field of view in degrees.
absorb_model : string, optional
The absorption model to use, "wabs" or "tbabs". Default: "wabs"
nH : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`, optional
The hydrogen column in units of 10**22 atoms/cm**2.
Default: 0.05
area : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`, optional
The effective area in cm**2. It must be large enough
so that a sufficiently large sample is drawn for the
ARF. Default: 40000.
input_sources : string, optional
If set to a filename, input the source positions, fluxes,
and spectral indices from an ASCII table instead of generating
them. Default: None
output_sources : string, optional
If set to a filename, output the properties of the sources
within the field of view to a file. Default: None
prng : :class:`~numpy.random.RandomState` object, integer, or None
A pseudo-random number generator. Typically will only
be specified if you have a reason to generate the same
set of random numbers, such as for a test. Default is None,
which sets the seed based on the system time.
"""
prng = parse_prng(prng)
exp_time = parse_value(exp_time, "s")
fov = parse_value(fov, "arcmin")
if nH is not None:
nH = parse_value(nH, "1.0e22*cm**-2")
area = parse_value(area, "cm**2")
if input_sources is None:
ra0, dec0, fluxes, ind = generate_sources(exp_time, fov, sky_center,
area=area, prng=prng)
num_sources = fluxes.size
else:
mylog.info("Reading in point-source properties from %s." % input_sources)
t = ascii.read(input_sources)
ra0 = t["RA"].data
dec0 = t["Dec"].data
fluxes = t["flux_0.5_2.0_keV"].data
ind = t["index"].data
num_sources = fluxes.size
mylog.debug("Generating spectra from %d sources." % num_sources)
# If requested, output the source properties to a file
if output_sources is not None:
t = Table([ra0, dec0, fluxes, ind],
names=('RA', 'Dec', 'flux_0.5_2.0_keV', 'index'))
t["RA"].unit = "deg"
t["Dec"].unit = "deg"
t["flux_0.5_2.0_keV"].unit = "erg/(cm**2*s)"
t["index"].unit = ""
t.write(output_sources, format='ascii.ecsv', overwrite=True)
# Pre-calculate for optimization
eratio = spec_emax/spec_emin
oma = 1.0-ind
invoma = 1.0/oma
invoma[oma == 0.0] = 1.0
fac1 = spec_emin**oma
fac2 = spec_emax**oma-fac1
fluxscale = get_flux_scale(ind, fb_emin, fb_emax, spec_emin, spec_emax)
# Using the energy flux, determine the photon flux by simple scaling
ref_ph_flux = fluxes*fluxscale*keV_per_erg
# Now determine the number of photons we will generate
n_photons = prng.poisson(ref_ph_flux*exp_time*area)
all_energies = []
all_ra = []
all_dec = []
for i, nph in enumerate(n_photons):
if nph > 0:
# Generate the energies in the source frame
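            # Inverse-CDF sampling of the power law E**-ind on
            # [spec_emin, spec_emax] (comment added for clarity): for
            # ind != 1, E = (fac1 + u*fac2)**invoma, i.e.
            # (emin**(1-ind) + u*(emax**(1-ind) - emin**(1-ind)))**(1/(1-ind));
            # the ind == 1 case reduces to emin*(emax/emin)**u.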
u = prng.uniform(size=nph)
if ind[i] == 1.0:
energies = spec_emin*(eratio**u)
else:
energies = fac1[i] + u*fac2[i]
energies **= invoma[i]
# Assign positions for this source
ra = ra0[i]*np.ones(nph)
dec = dec0[i]*np.ones(nph)
all_energies.append(energies)
all_ra.append(ra)
all_dec.append(dec)
mylog.debug("Finished generating spectra.")
all_energies = np.concatenate(all_energies)
all_ra = np.concatenate(all_ra)
all_dec = np.concatenate(all_dec)
all_nph = all_energies.size
# Remove some of the photons due to Galactic foreground absorption.
# We will throw a lot of stuff away, but this is more general and still
# faster.
if nH is not None:
        if absorb_model == "wabs":
            absorb = get_wabs_absorb(all_energies, nH)
        elif absorb_model == "tbabs":
            absorb = get_tbabs_absorb(all_energies, nH)
        else:
            raise ValueError("Unknown absorption model '%s'!" % absorb_model)
randvec = prng.uniform(size=all_energies.size)
all_energies = all_energies[randvec < absorb]
all_ra = all_ra[randvec < absorb]
all_dec = all_dec[randvec < absorb]
all_nph = all_energies.size
mylog.debug("%d photons remain after foreground galactic absorption." % all_nph)
all_flux = np.sum(all_energies)*erg_per_keV/(exp_time*area)
output_events = {"ra": all_ra, "dec": all_dec,
"energy": all_energies, "flux": all_flux}
return output_events
def make_point_sources_file(simput_prefix, phlist_prefix, exp_time, fov,
sky_center, absorb_model="wabs", nH=0.05,
area=40000.0, prng=None, append=False,
overwrite=False, input_sources=None,
output_sources=None):
"""
Make a SIMPUT catalog made up of contributions from
point sources.
Parameters
----------
simput_prefix : string
The filename prefix for the SIMPUT file.
phlist_prefix : string
The filename prefix for the photon list file.
exp_time : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`
The exposure time of the observation in seconds.
fov : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`
The field of view in arcminutes.
sky_center : array-like
The center RA, Dec of the field of view in degrees.
absorb_model : string, optional
The absorption model to use, "wabs" or "tbabs". Default: "wabs"
nH : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`, optional
The hydrogen column in units of 10**22 atoms/cm**2.
Default: 0.05
area : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`, optional
The effective area in cm**2. It must be large enough
so that a sufficiently large sample is drawn for the
ARF. Default: 40000.
prng : :class:`~numpy.random.RandomState` object, integer, or None
A pseudo-random number generator. Typically will only
be specified if you have a reason to generate the same
set of random numbers, such as for a test. Default is None,
which sets the seed based on the system time.
append : boolean, optional
If True, append a new source an existing SIMPUT
catalog. Default: False
overwrite : boolean, optional
Set to True to overwrite previous files. Default: False
input_sources : string, optional
If set to a filename, input the source positions, fluxes,
and spectral indices from an ASCII table instead of generating
them. Default: None
output_sources : string, optional
If set to a filename, output the properties of the sources
within the field of view to a file. Default: None
"""
events = make_ptsrc_background(exp_time, fov, sky_center,
absorb_model=absorb_model, nH=nH,
area=area, input_sources=input_sources,
output_sources=output_sources, prng=prng)
write_photon_list(simput_prefix, phlist_prefix, events["flux"],
events["ra"], events["dec"], events["energy"],
append=append, overwrite=overwrite)
def make_point_source_list(output_file, exp_time, fov, sky_center,
area=40000.0, prng=None):
r"""
Make a list of point source properties and write it to an ASCII
table file.
Parameters
----------
output_file : string
The ASCII table file to write the source properties to.
exp_time : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`
The exposure time of the observation in seconds.
fov : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`
The field of view in arcminutes.
sky_center : array-like
The center RA, Dec of the field of view in degrees.
area : float, (value, unit) tuple, or :class:`~astropy.units.Quantity`, optional
The effective area in cm**2. It must be large enough
so that a sufficiently large sample is drawn for the
ARF. Default: 40000.
prng : :class:`~numpy.random.RandomState` object, integer, or None
A pseudo-random number generator. Typically will only
be specified if you have a reason to generate the same
set of random numbers, such as for a test. Default is None,
which sets the seed based on the system time.
"""
ra0, dec0, fluxes, ind = generate_sources(exp_time, fov, sky_center,
area=area, prng=prng)
t = Table([ra0, dec0, fluxes, ind],
names=('RA', 'Dec', 'flux_0.5_2.0_keV', 'index'))
t["RA"].unit = "deg"
t["Dec"].unit = "deg"
t["flux_0.5_2.0_keV"].unit = "erg/(cm**2*s)"
t["index"].unit = ""
t.write(output_file, format='ascii.ecsv', overwrite=True)
avg_line_length: 40.739377 | max_line_length: 85 | alphanum_fraction: 0.618733
hexsha: edbeb25b091fa0c710b382156125c76ca1741a87 | size: 1,225 | ext: py | lang: Python
max_stars_repo_path: pytest_drf/fixtures.py | max_stars_repo_name: theY4Kman/pytest-drf | max_stars_repo_head_hexsha: 1c86e023ebe7353e89aa71a7fc3b15457b5b20bd | max_stars_repo_licenses: ["MIT"] | max_stars_count: 58 | stars_event: 2020-02-09T07:13:57.000Z to 2021-12-06T10:00:15.000Z
max_issues_repo_path: pytest_drf/fixtures.py | max_issues_repo_name: theY4Kman/pytest-drf | max_issues_repo_head_hexsha: 1c86e023ebe7353e89aa71a7fc3b15457b5b20bd | max_issues_repo_licenses: ["MIT"] | max_issues_count: 10 | issues_event: 2020-07-27T09:21:51.000Z to 2021-09-11T20:14:45.000Z
max_forks_repo_path: pytest_drf/fixtures.py | max_forks_repo_name: theY4Kman/pytest-drf | max_forks_repo_head_hexsha: 1c86e023ebe7353e89aa71a7fc3b15457b5b20bd | max_forks_repo_licenses: ["MIT"] | max_forks_count: 5 | forks_event: 2020-07-27T08:39:48.000Z to 2021-12-26T07:08:55.000Z
from typing import Callable, TYPE_CHECKING
import pytest
if TYPE_CHECKING:
# NOTE: APIClient forward refs used to avoid loading Django settings too early
from pytest_drf.client import DRFTestClient
from django.contrib.auth.models import User
__all__ = ['create_drf_client', 'unauthed_client']
@pytest.fixture
def create_drf_client() -> Callable[['User'], 'DRFTestClient']:
"""A method returning a test client authenticated to the passed user
To use a different test client class than the default DRF APIClient, or to
customize how users are authenticated, override this fixture with your own
implementation.
"""
# NOTE: local import used to avoid loading Django settings too early
from pytest_drf.client import DRFTestClient
def create_drf_client(user: 'User') -> 'DRFTestClient':
client = DRFTestClient()
client.force_authenticate(user=user)
return client
return create_drf_client
@pytest.fixture
def unauthed_client() -> 'DRFTestClient':
"""A DRF test client with no authentication"""
# NOTE: local import used to avoid loading Django settings too early
from pytest_drf.client import DRFTestClient
return DRFTestClient()
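# --- Hedged usage sketch (illustration only; the endpoint, status codes, and
# the pytest-django admin_user fixture are assumptions about the project under
# test, not part of this module) ---
@pytest.mark.django_db
def _example_test_authed_vs_unauthed(create_drf_client, unauthed_client, admin_user):
    authed_client = create_drf_client(admin_user)  # force-authenticated client
    assert authed_client.get('/api/').status_code == 200           # hypothetical endpoint
    assert unauthed_client.get('/api/').status_code in (401, 403)  # no credentials attached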
avg_line_length: 29.166667 | max_line_length: 82 | alphanum_fraction: 0.738776
hexsha: fed74c45428060ceb4d6fc06f4970281ca35f926 | size: 477,057 | ext: py | lang: Python
max_stars_repo_path: sdk/python/pulumi_azure_native/datamigration/outputs.py | max_stars_repo_name: sebtelko/pulumi-azure-native | max_stars_repo_head_hexsha: 711ec021b5c73da05611c56c8a35adb0ce3244e4 | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | stars_event: null to null
max_issues_repo_path: sdk/python/pulumi_azure_native/datamigration/outputs.py | max_issues_repo_name: sebtelko/pulumi-azure-native | max_issues_repo_head_hexsha: 711ec021b5c73da05611c56c8a35adb0ce3244e4 | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | issues_event: null to null
max_forks_repo_path: sdk/python/pulumi_azure_native/datamigration/outputs.py | max_forks_repo_name: sebtelko/pulumi-azure-native | max_forks_repo_head_hexsha: 711ec021b5c73da05611c56c8a35adb0ce3244e4 | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null | forks_event: null to null
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._enums import *
__all__ = [
'AzureActiveDirectoryAppResponse',
'BackupFileInfoResponse',
'BackupSetInfoResponse',
'BlobShareResponse',
'ConnectToSourcePostgreSqlSyncTaskInputResponse',
'ConnectToSourcePostgreSqlSyncTaskOutputResponse',
'ConnectToSourcePostgreSqlSyncTaskPropertiesResponse',
'ConnectToSourceSqlServerSyncTaskPropertiesResponse',
'ConnectToSourceSqlServerTaskInputResponse',
'ConnectToSourceSqlServerTaskOutputAgentJobLevelResponse',
'ConnectToSourceSqlServerTaskOutputDatabaseLevelResponse',
'ConnectToSourceSqlServerTaskOutputLoginLevelResponse',
'ConnectToSourceSqlServerTaskOutputTaskLevelResponse',
'ConnectToSourceSqlServerTaskPropertiesResponse',
'ConnectToTargetAzureDbForMySqlTaskInputResponse',
'ConnectToTargetAzureDbForMySqlTaskOutputResponse',
'ConnectToTargetAzureDbForMySqlTaskPropertiesResponse',
'ConnectToTargetAzureDbForPostgreSqlSyncTaskInputResponse',
'ConnectToTargetAzureDbForPostgreSqlSyncTaskOutputResponse',
'ConnectToTargetAzureDbForPostgreSqlSyncTaskPropertiesResponse',
'ConnectToTargetSqlDbTaskInputResponse',
'ConnectToTargetSqlDbTaskOutputResponse',
'ConnectToTargetSqlDbTaskPropertiesResponse',
'ConnectToTargetSqlMISyncTaskInputResponse',
'ConnectToTargetSqlMISyncTaskOutputResponse',
'ConnectToTargetSqlMISyncTaskPropertiesResponse',
'ConnectToTargetSqlMITaskInputResponse',
'ConnectToTargetSqlMITaskOutputResponse',
'ConnectToTargetSqlMITaskPropertiesResponse',
'ConnectToTargetSqlSqlDbSyncTaskInputResponse',
'ConnectToTargetSqlSqlDbSyncTaskPropertiesResponse',
'DataIntegrityValidationResultResponse',
'DataItemMigrationSummaryResultResponse',
'DatabaseBackupInfoResponse',
'DatabaseFileInfoResponse',
'DatabaseInfoResponse',
'DatabaseSummaryResultResponse',
'DatabaseTableResponse',
'ExecutionStatisticsResponse',
'FileShareResponse',
'GetTdeCertificatesSqlTaskInputResponse',
'GetTdeCertificatesSqlTaskOutputResponse',
'GetTdeCertificatesSqlTaskPropertiesResponse',
'GetUserTablesSqlSyncTaskInputResponse',
'GetUserTablesSqlSyncTaskOutputResponse',
'GetUserTablesSqlSyncTaskPropertiesResponse',
'GetUserTablesSqlTaskInputResponse',
'GetUserTablesSqlTaskOutputResponse',
'GetUserTablesSqlTaskPropertiesResponse',
'MiSqlConnectionInfoResponse',
'MigrateMISyncCompleteCommandInputResponse',
'MigrateMISyncCompleteCommandOutputResponse',
'MigrateMISyncCompleteCommandPropertiesResponse',
'MigrateMySqlAzureDbForMySqlSyncDatabaseInputResponse',
'MigrateMySqlAzureDbForMySqlSyncTaskInputResponse',
'MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseErrorResponse',
'MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevelResponse',
'MigrateMySqlAzureDbForMySqlSyncTaskOutputErrorResponse',
'MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevelResponse',
'MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevelResponse',
'MigrateMySqlAzureDbForMySqlSyncTaskPropertiesResponse',
'MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInputResponse',
'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInputResponse',
'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseErrorResponse',
'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevelResponse',
'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputErrorResponse',
'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevelResponse',
'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevelResponse',
'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskPropertiesResponse',
'MigrateSqlServerSqlDbDatabaseInputResponse',
'MigrateSqlServerSqlDbSyncDatabaseInputResponse',
'MigrateSqlServerSqlDbSyncTaskInputResponse',
'MigrateSqlServerSqlDbSyncTaskOutputDatabaseErrorResponse',
'MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevelResponse',
'MigrateSqlServerSqlDbSyncTaskOutputErrorResponse',
'MigrateSqlServerSqlDbSyncTaskOutputMigrationLevelResponse',
'MigrateSqlServerSqlDbSyncTaskOutputTableLevelResponse',
'MigrateSqlServerSqlDbSyncTaskPropertiesResponse',
'MigrateSqlServerSqlDbTaskInputResponse',
'MigrateSqlServerSqlDbTaskOutputDatabaseLevelResponse',
'MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResultResponse',
'MigrateSqlServerSqlDbTaskOutputErrorResponse',
'MigrateSqlServerSqlDbTaskOutputMigrationLevelResponse',
'MigrateSqlServerSqlDbTaskOutputTableLevelResponse',
'MigrateSqlServerSqlDbTaskOutputValidationResultResponse',
'MigrateSqlServerSqlDbTaskPropertiesResponse',
'MigrateSqlServerSqlMIDatabaseInputResponse',
'MigrateSqlServerSqlMISyncTaskInputResponse',
'MigrateSqlServerSqlMISyncTaskOutputDatabaseLevelResponse',
'MigrateSqlServerSqlMISyncTaskOutputErrorResponse',
'MigrateSqlServerSqlMISyncTaskOutputMigrationLevelResponse',
'MigrateSqlServerSqlMISyncTaskPropertiesResponse',
'MigrateSqlServerSqlMITaskInputResponse',
'MigrateSqlServerSqlMITaskOutputAgentJobLevelResponse',
'MigrateSqlServerSqlMITaskOutputDatabaseLevelResponse',
'MigrateSqlServerSqlMITaskOutputErrorResponse',
'MigrateSqlServerSqlMITaskOutputLoginLevelResponse',
'MigrateSqlServerSqlMITaskOutputMigrationLevelResponse',
'MigrateSqlServerSqlMITaskPropertiesResponse',
'MigrateSyncCompleteCommandInputResponse',
'MigrateSyncCompleteCommandOutputResponse',
'MigrateSyncCompleteCommandPropertiesResponse',
'MigrationEligibilityInfoResponse',
'MigrationReportResultResponse',
'MigrationValidationDatabaseSummaryResultResponse',
'MigrationValidationOptionsResponse',
'MySqlConnectionInfoResponse',
'ODataErrorResponse',
'OrphanedUserInfoResponse',
'PostgreSqlConnectionInfoResponse',
'ProjectFilePropertiesResponse',
'QueryAnalysisValidationResultResponse',
'QueryExecutionResultResponse',
'ReportableExceptionResponse',
'SchemaComparisonValidationResultResponse',
'SchemaComparisonValidationResultTypeResponse',
'SelectedCertificateInputResponse',
'ServiceSkuResponse',
'SqlConnectionInfoResponse',
'StartMigrationScenarioServerRoleResultResponse',
'SyncMigrationDatabaseErrorEventResponse',
'ValidateMigrationInputSqlServerSqlDbSyncTaskPropertiesResponse',
'ValidateMigrationInputSqlServerSqlMISyncTaskInputResponse',
'ValidateMigrationInputSqlServerSqlMISyncTaskOutputResponse',
'ValidateMigrationInputSqlServerSqlMISyncTaskPropertiesResponse',
'ValidateMigrationInputSqlServerSqlMITaskInputResponse',
'ValidateMigrationInputSqlServerSqlMITaskOutputResponse',
'ValidateMigrationInputSqlServerSqlMITaskPropertiesResponse',
'ValidateSyncMigrationInputSqlServerTaskInputResponse',
'ValidateSyncMigrationInputSqlServerTaskOutputResponse',
'ValidationErrorResponse',
'WaitStatisticsResponse',
]
@pulumi.output_type
class AzureActiveDirectoryAppResponse(dict):
"""
Azure Active Directory Application
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "appKey":
suggest = "app_key"
elif key == "applicationId":
suggest = "application_id"
elif key == "tenantId":
suggest = "tenant_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureActiveDirectoryAppResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureActiveDirectoryAppResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureActiveDirectoryAppResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
app_key: str,
application_id: str,
tenant_id: str):
"""
Azure Active Directory Application
:param str app_key: Key used to authenticate to the Azure Active Directory Application
:param str application_id: Application ID of the Azure Active Directory Application
:param str tenant_id: Tenant id of the customer
"""
pulumi.set(__self__, "app_key", app_key)
pulumi.set(__self__, "application_id", application_id)
pulumi.set(__self__, "tenant_id", tenant_id)
@property
@pulumi.getter(name="appKey")
def app_key(self) -> str:
"""
Key used to authenticate to the Azure Active Directory Application
"""
return pulumi.get(self, "app_key")
@property
@pulumi.getter(name="applicationId")
def application_id(self) -> str:
"""
Application ID of the Azure Active Directory Application
"""
return pulumi.get(self, "application_id")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> str:
"""
Tenant id of the customer
"""
return pulumi.get(self, "tenant_id")
@pulumi.output_type
class BackupFileInfoResponse(dict):
"""
Information of the backup file
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "familySequenceNumber":
suggest = "family_sequence_number"
elif key == "fileLocation":
suggest = "file_location"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in BackupFileInfoResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
BackupFileInfoResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
BackupFileInfoResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
family_sequence_number: Optional[int] = None,
file_location: Optional[str] = None,
status: Optional[str] = None):
"""
Information of the backup file
:param int family_sequence_number: Sequence number of the backup file in the backup set
:param str file_location: Location of the backup file in shared folder
:param str status: Status of the backup file during migration
"""
if family_sequence_number is not None:
pulumi.set(__self__, "family_sequence_number", family_sequence_number)
if file_location is not None:
pulumi.set(__self__, "file_location", file_location)
if status is not None:
pulumi.set(__self__, "status", status)
@property
@pulumi.getter(name="familySequenceNumber")
def family_sequence_number(self) -> Optional[int]:
"""
Sequence number of the backup file in the backup set
"""
return pulumi.get(self, "family_sequence_number")
@property
@pulumi.getter(name="fileLocation")
def file_location(self) -> Optional[str]:
"""
Location of the backup file in shared folder
"""
return pulumi.get(self, "file_location")
@property
@pulumi.getter
def status(self) -> Optional[str]:
"""
Status of the backup file during migration
"""
return pulumi.get(self, "status")
@pulumi.output_type
class BackupSetInfoResponse(dict):
"""
Information of backup set
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "backupFinishedDate":
suggest = "backup_finished_date"
elif key == "backupSetId":
suggest = "backup_set_id"
elif key == "backupStartDate":
suggest = "backup_start_date"
elif key == "backupType":
suggest = "backup_type"
elif key == "databaseName":
suggest = "database_name"
elif key == "firstLsn":
suggest = "first_lsn"
elif key == "isBackupRestored":
suggest = "is_backup_restored"
elif key == "lastLsn":
suggest = "last_lsn"
elif key == "lastModifiedTime":
suggest = "last_modified_time"
elif key == "listOfBackupFiles":
suggest = "list_of_backup_files"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in BackupSetInfoResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
BackupSetInfoResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
BackupSetInfoResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
backup_finished_date: Optional[str] = None,
backup_set_id: Optional[str] = None,
backup_start_date: Optional[str] = None,
backup_type: Optional[str] = None,
database_name: Optional[str] = None,
first_lsn: Optional[str] = None,
is_backup_restored: Optional[bool] = None,
last_lsn: Optional[str] = None,
last_modified_time: Optional[str] = None,
list_of_backup_files: Optional[Sequence['outputs.BackupFileInfoResponse']] = None):
"""
Information of backup set
:param str backup_finished_date: Date and time that the backup operation finished
:param str backup_set_id: Id for the set of backup files
:param str backup_start_date: Date and time that the backup operation began
:param str backup_type: Enum of the different backup types
:param str database_name: Name of the database to which the backup set belongs
:param str first_lsn: First log sequence number of the backup file
:param bool is_backup_restored: Whether the backup set is restored or not
:param str last_lsn: Last log sequence number of the backup file
:param str last_modified_time: Last modified time of the backup file in share location
:param Sequence['BackupFileInfoResponse'] list_of_backup_files: List of files in the backup set
"""
if backup_finished_date is not None:
pulumi.set(__self__, "backup_finished_date", backup_finished_date)
if backup_set_id is not None:
pulumi.set(__self__, "backup_set_id", backup_set_id)
if backup_start_date is not None:
pulumi.set(__self__, "backup_start_date", backup_start_date)
if backup_type is not None:
pulumi.set(__self__, "backup_type", backup_type)
if database_name is not None:
pulumi.set(__self__, "database_name", database_name)
if first_lsn is not None:
pulumi.set(__self__, "first_lsn", first_lsn)
if is_backup_restored is not None:
pulumi.set(__self__, "is_backup_restored", is_backup_restored)
if last_lsn is not None:
pulumi.set(__self__, "last_lsn", last_lsn)
if last_modified_time is not None:
pulumi.set(__self__, "last_modified_time", last_modified_time)
if list_of_backup_files is not None:
pulumi.set(__self__, "list_of_backup_files", list_of_backup_files)
@property
@pulumi.getter(name="backupFinishedDate")
def backup_finished_date(self) -> Optional[str]:
"""
Date and time that the backup operation finished
"""
return pulumi.get(self, "backup_finished_date")
@property
@pulumi.getter(name="backupSetId")
def backup_set_id(self) -> Optional[str]:
"""
Id for the set of backup files
"""
return pulumi.get(self, "backup_set_id")
@property
@pulumi.getter(name="backupStartDate")
def backup_start_date(self) -> Optional[str]:
"""
Date and time that the backup operation began
"""
return pulumi.get(self, "backup_start_date")
@property
@pulumi.getter(name="backupType")
def backup_type(self) -> Optional[str]:
"""
Enum of the different backup types
"""
return pulumi.get(self, "backup_type")
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> Optional[str]:
"""
Name of the database to which the backup set belongs
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter(name="firstLsn")
def first_lsn(self) -> Optional[str]:
"""
First log sequence number of the backup file
"""
return pulumi.get(self, "first_lsn")
@property
@pulumi.getter(name="isBackupRestored")
def is_backup_restored(self) -> Optional[bool]:
"""
Whether the backup set is restored or not
"""
return pulumi.get(self, "is_backup_restored")
@property
@pulumi.getter(name="lastLsn")
def last_lsn(self) -> Optional[str]:
"""
Last log sequence number of the backup file
"""
return pulumi.get(self, "last_lsn")
@property
@pulumi.getter(name="lastModifiedTime")
def last_modified_time(self) -> Optional[str]:
"""
Last modified time of the backup file in share location
"""
return pulumi.get(self, "last_modified_time")
@property
@pulumi.getter(name="listOfBackupFiles")
def list_of_backup_files(self) -> Optional[Sequence['outputs.BackupFileInfoResponse']]:
"""
List of files in the backup set
"""
return pulumi.get(self, "list_of_backup_files")
@pulumi.output_type
class BlobShareResponse(dict):
"""
Blob container storage information.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sasUri":
suggest = "sas_uri"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in BlobShareResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
BlobShareResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
BlobShareResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
sas_uri: str):
"""
Blob container storage information.
:param str sas_uri: SAS URI of Azure Storage Account Container.
"""
pulumi.set(__self__, "sas_uri", sas_uri)
@property
@pulumi.getter(name="sasUri")
def sas_uri(self) -> str:
"""
SAS URI of Azure Storage Account Container.
"""
return pulumi.get(self, "sas_uri")
@pulumi.output_type
class ConnectToSourcePostgreSqlSyncTaskInputResponse(dict):
"""
Input for the task that validates connection to PostgreSQL and source server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sourceConnectionInfo":
suggest = "source_connection_info"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToSourcePostgreSqlSyncTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToSourcePostgreSqlSyncTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToSourcePostgreSqlSyncTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
source_connection_info: 'outputs.PostgreSqlConnectionInfoResponse'):
"""
Input for the task that validates connection to PostgreSQL and source server requirements
:param 'PostgreSqlConnectionInfoResponse' source_connection_info: Connection information for source PostgreSQL server
"""
pulumi.set(__self__, "source_connection_info", source_connection_info)
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.PostgreSqlConnectionInfoResponse':
"""
Connection information for source PostgreSQL server
"""
return pulumi.get(self, "source_connection_info")
@pulumi.output_type
class ConnectToSourcePostgreSqlSyncTaskOutputResponse(dict):
"""
Output for the task that validates connection to PostgreSQL and source server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sourceServerBrandVersion":
suggest = "source_server_brand_version"
elif key == "sourceServerVersion":
suggest = "source_server_version"
elif key == "validationErrors":
suggest = "validation_errors"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToSourcePostgreSqlSyncTaskOutputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToSourcePostgreSqlSyncTaskOutputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToSourcePostgreSqlSyncTaskOutputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
databases: Sequence[str],
id: str,
source_server_brand_version: str,
source_server_version: str,
validation_errors: Sequence['outputs.ReportableExceptionResponse']):
"""
Output for the task that validates connection to PostgreSQL and source server requirements
:param Sequence[str] databases: List of databases on source server
:param str id: Result identifier
:param str source_server_brand_version: Source server brand version
:param str source_server_version: Version of the source server
:param Sequence['ReportableExceptionResponse'] validation_errors: Validation errors associated with the task
"""
pulumi.set(__self__, "databases", databases)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "source_server_brand_version", source_server_brand_version)
pulumi.set(__self__, "source_server_version", source_server_version)
pulumi.set(__self__, "validation_errors", validation_errors)
@property
@pulumi.getter
def databases(self) -> Sequence[str]:
"""
List of databases on source server
"""
return pulumi.get(self, "databases")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="sourceServerBrandVersion")
def source_server_brand_version(self) -> str:
"""
Source server brand version
"""
return pulumi.get(self, "source_server_brand_version")
@property
@pulumi.getter(name="sourceServerVersion")
def source_server_version(self) -> str:
"""
Version of the source server
"""
return pulumi.get(self, "source_server_version")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Validation errors associated with the task
"""
return pulumi.get(self, "validation_errors")
@pulumi.output_type
class ConnectToSourcePostgreSqlSyncTaskPropertiesResponse(dict):
"""
Properties for the task that validates connection to PostgreSQL server and source server requirements for online migration
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToSourcePostgreSqlSyncTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToSourcePostgreSqlSyncTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToSourcePostgreSqlSyncTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence['outputs.ConnectToSourcePostgreSqlSyncTaskOutputResponse'],
state: str,
task_type: str,
input: Optional['outputs.ConnectToSourcePostgreSqlSyncTaskInputResponse'] = None):
"""
Properties for the task that validates connection to PostgreSQL server and source server requirements for online migration
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence['ConnectToSourcePostgreSqlSyncTaskOutputResponse'] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'ConnectToSource.PostgreSql.Sync'.
:param 'ConnectToSourcePostgreSqlSyncTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'ConnectToSource.PostgreSql.Sync')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence['outputs.ConnectToSourcePostgreSqlSyncTaskOutputResponse']:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'ConnectToSource.PostgreSql.Sync'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.ConnectToSourcePostgreSqlSyncTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class ConnectToSourceSqlServerSyncTaskPropertiesResponse(dict):
"""
Properties for the task that validates connection to SQL Server and source server requirements for online migration
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToSourceSqlServerSyncTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToSourceSqlServerSyncTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToSourceSqlServerSyncTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence[Any],
state: str,
task_type: str,
input: Optional['outputs.ConnectToSourceSqlServerTaskInputResponse'] = None):
"""
Properties for the task that validates connection to SQL Server and source server requirements for online migration
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence[Union['ConnectToSourceSqlServerTaskOutputAgentJobLevelResponse', 'ConnectToSourceSqlServerTaskOutputDatabaseLevelResponse', 'ConnectToSourceSqlServerTaskOutputLoginLevelResponse', 'ConnectToSourceSqlServerTaskOutputTaskLevelResponse']] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'ConnectToSource.SqlServer.Sync'.
:param 'ConnectToSourceSqlServerTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'ConnectToSource.SqlServer.Sync')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence[Any]:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'ConnectToSource.SqlServer.Sync'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.ConnectToSourceSqlServerTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class ConnectToSourceSqlServerTaskInputResponse(dict):
"""
Input for the task that validates connection to SQL Server and also validates source server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "checkPermissionsGroup":
suggest = "check_permissions_group"
elif key == "collectAgentJobs":
suggest = "collect_agent_jobs"
elif key == "collectLogins":
suggest = "collect_logins"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToSourceSqlServerTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToSourceSqlServerTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToSourceSqlServerTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
source_connection_info: 'outputs.SqlConnectionInfoResponse',
check_permissions_group: Optional[str] = None,
collect_agent_jobs: Optional[bool] = None,
collect_logins: Optional[bool] = None):
"""
Input for the task that validates connection to SQL Server and also validates source server requirements
:param 'SqlConnectionInfoResponse' source_connection_info: Connection information for Source SQL Server
:param str check_permissions_group: Permission group for validations
:param bool collect_agent_jobs: Flag for whether to collect agent jobs from source server.
:param bool collect_logins: Flag for whether to collect logins from source server.
"""
pulumi.set(__self__, "source_connection_info", source_connection_info)
if check_permissions_group is not None:
pulumi.set(__self__, "check_permissions_group", check_permissions_group)
if collect_agent_jobs is None:
collect_agent_jobs = False
if collect_agent_jobs is not None:
pulumi.set(__self__, "collect_agent_jobs", collect_agent_jobs)
if collect_logins is None:
collect_logins = False
if collect_logins is not None:
pulumi.set(__self__, "collect_logins", collect_logins)
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Connection information for Source SQL Server
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="checkPermissionsGroup")
def check_permissions_group(self) -> Optional[str]:
"""
Permission group for validations
"""
return pulumi.get(self, "check_permissions_group")
@property
@pulumi.getter(name="collectAgentJobs")
def collect_agent_jobs(self) -> Optional[bool]:
"""
Flag for whether to collect agent jobs from source server.
"""
return pulumi.get(self, "collect_agent_jobs")
@property
@pulumi.getter(name="collectLogins")
def collect_logins(self) -> Optional[bool]:
"""
Flag for whether to collect logins from source server.
"""
return pulumi.get(self, "collect_logins")
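# --- Hedged usage sketch (illustration only; not part of the generated SDK) ---
# The optional boolean flags are normalized in __init__: an explicit None is
# replaced with False before being stored, so the getters return a concrete
# bool. conn stands in for a SqlConnectionInfoResponse supplied by the caller
# (its construction is omitted here).
def _example_collect_flag_defaults(conn):
    task_input = ConnectToSourceSqlServerTaskInputResponse(
        source_connection_info=conn,
        collect_agent_jobs=None,  # normalized to False
        collect_logins=None)      # normalized to False
    assert task_input.collect_agent_jobs is False
    assert task_input.collect_logins is False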
@pulumi.output_type
class ConnectToSourceSqlServerTaskOutputAgentJobLevelResponse(dict):
"""
AgentJob level output for the task that validates connection to SQL Server and also validates source server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "isEnabled":
suggest = "is_enabled"
elif key == "jobCategory":
suggest = "job_category"
elif key == "jobOwner":
suggest = "job_owner"
elif key == "lastExecutedOn":
suggest = "last_executed_on"
elif key == "migrationEligibility":
suggest = "migration_eligibility"
elif key == "resultType":
suggest = "result_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToSourceSqlServerTaskOutputAgentJobLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToSourceSqlServerTaskOutputAgentJobLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToSourceSqlServerTaskOutputAgentJobLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
id: str,
is_enabled: bool,
job_category: str,
job_owner: str,
last_executed_on: str,
migration_eligibility: 'outputs.MigrationEligibilityInfoResponse',
name: str,
result_type: str):
"""
AgentJob level output for the task that validates connection to SQL Server and also validates source server requirements
:param str id: Result identifier
:param bool is_enabled: The state of the original AgentJob.
:param str job_category: The type of AgentJob.
:param str job_owner: The owner of the AgentJob
:param str last_executed_on: UTC Date and time when the AgentJob was last executed.
:param 'MigrationEligibilityInfoResponse' migration_eligibility: Information about eligibility of agent job for migration.
:param str name: AgentJob name
:param str result_type: Type of result - database level or task level
Expected value is 'AgentJobLevelOutput'.
"""
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "is_enabled", is_enabled)
pulumi.set(__self__, "job_category", job_category)
pulumi.set(__self__, "job_owner", job_owner)
pulumi.set(__self__, "last_executed_on", last_executed_on)
pulumi.set(__self__, "migration_eligibility", migration_eligibility)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "result_type", 'AgentJobLevelOutput')
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="isEnabled")
def is_enabled(self) -> bool:
"""
The state of the original AgentJob.
"""
return pulumi.get(self, "is_enabled")
@property
@pulumi.getter(name="jobCategory")
def job_category(self) -> str:
"""
The type of AgentJob.
"""
return pulumi.get(self, "job_category")
@property
@pulumi.getter(name="jobOwner")
def job_owner(self) -> str:
"""
The owner of the AgentJob
"""
return pulumi.get(self, "job_owner")
@property
@pulumi.getter(name="lastExecutedOn")
def last_executed_on(self) -> str:
"""
UTC Date and time when the AgentJob was last executed.
"""
return pulumi.get(self, "last_executed_on")
@property
@pulumi.getter(name="migrationEligibility")
def migration_eligibility(self) -> 'outputs.MigrationEligibilityInfoResponse':
"""
Information about eligibility of agent job for migration.
"""
return pulumi.get(self, "migration_eligibility")
@property
@pulumi.getter
def name(self) -> str:
"""
AgentJob name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Type of result - database level or task level
Expected value is 'AgentJobLevelOutput'.
"""
return pulumi.get(self, "result_type")
@pulumi.output_type
class ConnectToSourceSqlServerTaskOutputDatabaseLevelResponse(dict):
"""
Database level output for the task that validates connection to SQL Server and also validates source server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "compatibilityLevel":
suggest = "compatibility_level"
elif key == "databaseFiles":
suggest = "database_files"
elif key == "databaseState":
suggest = "database_state"
elif key == "resultType":
suggest = "result_type"
elif key == "sizeMB":
suggest = "size_mb"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToSourceSqlServerTaskOutputDatabaseLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToSourceSqlServerTaskOutputDatabaseLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToSourceSqlServerTaskOutputDatabaseLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
compatibility_level: str,
database_files: Sequence['outputs.DatabaseFileInfoResponse'],
database_state: str,
id: str,
name: str,
result_type: str,
size_mb: float):
"""
Database level output for the task that validates connection to SQL Server and also validates source server requirements
:param str compatibility_level: SQL Server compatibility level of database
:param Sequence['DatabaseFileInfoResponse'] database_files: The list of database files
:param str database_state: State of the database
:param str id: Result identifier
:param str name: Database name
:param str result_type: Type of result - database level or task level
Expected value is 'DatabaseLevelOutput'.
:param float size_mb: Size of the file in megabytes
"""
pulumi.set(__self__, "compatibility_level", compatibility_level)
pulumi.set(__self__, "database_files", database_files)
pulumi.set(__self__, "database_state", database_state)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "result_type", 'DatabaseLevelOutput')
pulumi.set(__self__, "size_mb", size_mb)
@property
@pulumi.getter(name="compatibilityLevel")
def compatibility_level(self) -> str:
"""
SQL Server compatibility level of database
"""
return pulumi.get(self, "compatibility_level")
@property
@pulumi.getter(name="databaseFiles")
def database_files(self) -> Sequence['outputs.DatabaseFileInfoResponse']:
"""
The list of database files
"""
return pulumi.get(self, "database_files")
@property
@pulumi.getter(name="databaseState")
def database_state(self) -> str:
"""
State of the database
"""
return pulumi.get(self, "database_state")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
Database name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Type of result - database level or task level
Expected value is 'DatabaseLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="sizeMB")
def size_mb(self) -> float:
"""
Size of the file in megabytes
"""
return pulumi.get(self, "size_mb")
@pulumi.output_type
class ConnectToSourceSqlServerTaskOutputLoginLevelResponse(dict):
"""
Login level output for the task that validates connection to SQL Server and also validates source server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "defaultDatabase":
suggest = "default_database"
elif key == "isEnabled":
suggest = "is_enabled"
elif key == "loginType":
suggest = "login_type"
elif key == "migrationEligibility":
suggest = "migration_eligibility"
elif key == "resultType":
suggest = "result_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToSourceSqlServerTaskOutputLoginLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToSourceSqlServerTaskOutputLoginLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToSourceSqlServerTaskOutputLoginLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
default_database: str,
id: str,
is_enabled: bool,
login_type: str,
migration_eligibility: 'outputs.MigrationEligibilityInfoResponse',
name: str,
result_type: str):
"""
Login level output for the task that validates connection to SQL Server and also validates source server requirements
:param str default_database: The default database for the login.
:param str id: Result identifier
:param bool is_enabled: The state of the login.
:param str login_type: The type of login.
:param 'MigrationEligibilityInfoResponse' migration_eligibility: Information about eligibility of login for migration.
:param str name: Login name.
:param str result_type: Type of result - database level or task level
Expected value is 'LoginLevelOutput'.
"""
pulumi.set(__self__, "default_database", default_database)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "is_enabled", is_enabled)
pulumi.set(__self__, "login_type", login_type)
pulumi.set(__self__, "migration_eligibility", migration_eligibility)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "result_type", 'LoginLevelOutput')
@property
@pulumi.getter(name="defaultDatabase")
def default_database(self) -> str:
"""
The default database for the login.
"""
return pulumi.get(self, "default_database")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="isEnabled")
def is_enabled(self) -> bool:
"""
The state of the login.
"""
return pulumi.get(self, "is_enabled")
@property
@pulumi.getter(name="loginType")
def login_type(self) -> str:
"""
The type of login.
"""
return pulumi.get(self, "login_type")
@property
@pulumi.getter(name="migrationEligibility")
def migration_eligibility(self) -> 'outputs.MigrationEligibilityInfoResponse':
"""
Information about eligibility of login for migration.
"""
return pulumi.get(self, "migration_eligibility")
@property
@pulumi.getter
def name(self) -> str:
"""
Login name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Type of result - database level or task level
Expected value is 'LoginLevelOutput'.
"""
return pulumi.get(self, "result_type")
@pulumi.output_type
class ConnectToSourceSqlServerTaskOutputTaskLevelResponse(dict):
"""
Task level output for the task that validates connection to SQL Server and also validates source server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "agentJobs":
suggest = "agent_jobs"
elif key == "resultType":
suggest = "result_type"
elif key == "sourceServerBrandVersion":
suggest = "source_server_brand_version"
elif key == "sourceServerVersion":
suggest = "source_server_version"
elif key == "validationErrors":
suggest = "validation_errors"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToSourceSqlServerTaskOutputTaskLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToSourceSqlServerTaskOutputTaskLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToSourceSqlServerTaskOutputTaskLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
agent_jobs: Mapping[str, str],
databases: Mapping[str, str],
id: str,
logins: Mapping[str, str],
result_type: str,
source_server_brand_version: str,
source_server_version: str,
validation_errors: Sequence['outputs.ReportableExceptionResponse']):
"""
Task level output for the task that validates connection to SQL Server and also validates source server requirements
:param Mapping[str, str] agent_jobs: Source agent jobs as a map from agent job name to id.
:param Mapping[str, str] databases: Source databases as a map from database name to database id
:param str id: Result identifier
:param Mapping[str, str] logins: Source logins as a map from login name to login id.
:param str result_type: Type of result - database level or task level
Expected value is 'TaskLevelOutput'.
:param str source_server_brand_version: Source server brand version
:param str source_server_version: Source server version
:param Sequence['ReportableExceptionResponse'] validation_errors: Validation errors
"""
pulumi.set(__self__, "agent_jobs", agent_jobs)
pulumi.set(__self__, "databases", databases)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "logins", logins)
pulumi.set(__self__, "result_type", 'TaskLevelOutput')
pulumi.set(__self__, "source_server_brand_version", source_server_brand_version)
pulumi.set(__self__, "source_server_version", source_server_version)
pulumi.set(__self__, "validation_errors", validation_errors)
@property
@pulumi.getter(name="agentJobs")
def agent_jobs(self) -> Mapping[str, str]:
"""
Source agent jobs as a map from agent job name to id.
"""
return pulumi.get(self, "agent_jobs")
@property
@pulumi.getter
def databases(self) -> Mapping[str, str]:
"""
Source databases as a map from database name to database id
"""
return pulumi.get(self, "databases")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def logins(self) -> Mapping[str, str]:
"""
Source logins as a map from login name to login id.
"""
return pulumi.get(self, "logins")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Type of result - database level or task level
Expected value is 'TaskLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="sourceServerBrandVersion")
def source_server_brand_version(self) -> str:
"""
Source server brand version
"""
return pulumi.get(self, "source_server_brand_version")
@property
@pulumi.getter(name="sourceServerVersion")
def source_server_version(self) -> str:
"""
Source server version
"""
return pulumi.get(self, "source_server_version")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Validation errors
"""
return pulumi.get(self, "validation_errors")
@pulumi.output_type
class ConnectToSourceSqlServerTaskPropertiesResponse(dict):
"""
Properties for the task that validates connection to SQL Server and also validates source server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToSourceSqlServerTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToSourceSqlServerTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToSourceSqlServerTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence[Any],
state: str,
task_type: str,
input: Optional['outputs.ConnectToSourceSqlServerTaskInputResponse'] = None):
"""
Properties for the task that validates connection to SQL Server and also validates source server requirements
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence[Union['ConnectToSourceSqlServerTaskOutputAgentJobLevelResponse', 'ConnectToSourceSqlServerTaskOutputDatabaseLevelResponse', 'ConnectToSourceSqlServerTaskOutputLoginLevelResponse', 'ConnectToSourceSqlServerTaskOutputTaskLevelResponse']] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'ConnectToSource.SqlServer'.
:param 'ConnectToSourceSqlServerTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'ConnectToSource.SqlServer')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence[Any]:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'ConnectToSource.SqlServer'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.ConnectToSourceSqlServerTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class ConnectToTargetAzureDbForMySqlTaskInputResponse(dict):
"""
Input for the task that validates connection to Azure Database for MySQL and target server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetAzureDbForMySqlTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetAzureDbForMySqlTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetAzureDbForMySqlTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
source_connection_info: 'outputs.MySqlConnectionInfoResponse',
target_connection_info: 'outputs.MySqlConnectionInfoResponse'):
"""
Input for the task that validates connection to Azure Database for MySQL and target server requirements
:param 'MySqlConnectionInfoResponse' source_connection_info: Connection information for source MySQL server
:param 'MySqlConnectionInfoResponse' target_connection_info: Connection information for target Azure Database for MySQL server
"""
pulumi.set(__self__, "source_connection_info", source_connection_info)
pulumi.set(__self__, "target_connection_info", target_connection_info)
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.MySqlConnectionInfoResponse':
"""
Connection information for source MySQL server
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.MySqlConnectionInfoResponse':
"""
Connection information for target Azure Database for MySQL server
"""
return pulumi.get(self, "target_connection_info")
@pulumi.output_type
class ConnectToTargetAzureDbForMySqlTaskOutputResponse(dict):
"""
Output for the task that validates connection to Azure Database for MySQL and target server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "serverVersion":
suggest = "server_version"
elif key == "targetServerBrandVersion":
suggest = "target_server_brand_version"
elif key == "validationErrors":
suggest = "validation_errors"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetAzureDbForMySqlTaskOutputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetAzureDbForMySqlTaskOutputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetAzureDbForMySqlTaskOutputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
databases: Sequence[str],
id: str,
server_version: str,
target_server_brand_version: str,
validation_errors: Sequence['outputs.ReportableExceptionResponse']):
"""
Output for the task that validates connection to Azure Database for MySQL and target server requirements
:param Sequence[str] databases: List of databases on target server
:param str id: Result identifier
:param str server_version: Version of the target server
:param str target_server_brand_version: Target server brand version
:param Sequence['ReportableExceptionResponse'] validation_errors: Validation errors associated with the task
"""
pulumi.set(__self__, "databases", databases)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "server_version", server_version)
pulumi.set(__self__, "target_server_brand_version", target_server_brand_version)
pulumi.set(__self__, "validation_errors", validation_errors)
@property
@pulumi.getter
def databases(self) -> Sequence[str]:
"""
List of databases on target server
"""
return pulumi.get(self, "databases")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="serverVersion")
def server_version(self) -> str:
"""
Version of the target server
"""
return pulumi.get(self, "server_version")
@property
@pulumi.getter(name="targetServerBrandVersion")
def target_server_brand_version(self) -> str:
"""
Target server brand version
"""
return pulumi.get(self, "target_server_brand_version")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Validation errors associated with the task
"""
return pulumi.get(self, "validation_errors")
@pulumi.output_type
class ConnectToTargetAzureDbForMySqlTaskPropertiesResponse(dict):
"""
Properties for the task that validates connection to Azure Database for MySQL and target server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetAzureDbForMySqlTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetAzureDbForMySqlTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetAzureDbForMySqlTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence['outputs.ConnectToTargetAzureDbForMySqlTaskOutputResponse'],
state: str,
task_type: str,
input: Optional['outputs.ConnectToTargetAzureDbForMySqlTaskInputResponse'] = None):
"""
Properties for the task that validates connection to Azure Database for MySQL and target server requirements
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence['ConnectToTargetAzureDbForMySqlTaskOutputResponse'] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'ConnectToTarget.AzureDbForMySql'.
:param 'ConnectToTargetAzureDbForMySqlTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'ConnectToTarget.AzureDbForMySql')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence['outputs.ConnectToTargetAzureDbForMySqlTaskOutputResponse']:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'ConnectToTarget.AzureDbForMySql'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.ConnectToTargetAzureDbForMySqlTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class ConnectToTargetAzureDbForPostgreSqlSyncTaskInputResponse(dict):
"""
Input for the task that validates connection to Azure Database for PostgreSQL and target server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetAzureDbForPostgreSqlSyncTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetAzureDbForPostgreSqlSyncTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetAzureDbForPostgreSqlSyncTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
source_connection_info: 'outputs.PostgreSqlConnectionInfoResponse',
target_connection_info: 'outputs.PostgreSqlConnectionInfoResponse'):
"""
Input for the task that validates connection to Azure Database for PostgreSQL and target server requirements
:param 'PostgreSqlConnectionInfoResponse' source_connection_info: Connection information for source PostgreSQL server
:param 'PostgreSqlConnectionInfoResponse' target_connection_info: Connection information for target Azure Database for PostgreSQL server
"""
pulumi.set(__self__, "source_connection_info", source_connection_info)
pulumi.set(__self__, "target_connection_info", target_connection_info)
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.PostgreSqlConnectionInfoResponse':
"""
Connection information for source PostgreSQL server
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.PostgreSqlConnectionInfoResponse':
"""
Connection information for target Azure Database for PostgreSQL server
"""
return pulumi.get(self, "target_connection_info")
@pulumi.output_type
class ConnectToTargetAzureDbForPostgreSqlSyncTaskOutputResponse(dict):
"""
Output for the task that validates connection to Azure Database for PostgreSQL and target server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "targetServerBrandVersion":
suggest = "target_server_brand_version"
elif key == "targetServerVersion":
suggest = "target_server_version"
elif key == "validationErrors":
suggest = "validation_errors"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetAzureDbForPostgreSqlSyncTaskOutputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetAzureDbForPostgreSqlSyncTaskOutputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetAzureDbForPostgreSqlSyncTaskOutputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
databases: Sequence[str],
id: str,
target_server_brand_version: str,
target_server_version: str,
validation_errors: Sequence['outputs.ReportableExceptionResponse']):
"""
Output for the task that validates connection to Azure Database for PostgreSQL and target server requirements
:param Sequence[str] databases: List of databases on target server
:param str id: Result identifier
:param str target_server_brand_version: Target server brand version
:param str target_server_version: Version of the target server
:param Sequence['ReportableExceptionResponse'] validation_errors: Validation errors associated with the task
"""
pulumi.set(__self__, "databases", databases)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "target_server_brand_version", target_server_brand_version)
pulumi.set(__self__, "target_server_version", target_server_version)
pulumi.set(__self__, "validation_errors", validation_errors)
@property
@pulumi.getter
def databases(self) -> Sequence[str]:
"""
List of databases on target server
"""
return pulumi.get(self, "databases")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="targetServerBrandVersion")
def target_server_brand_version(self) -> str:
"""
Target server brand version
"""
return pulumi.get(self, "target_server_brand_version")
@property
@pulumi.getter(name="targetServerVersion")
def target_server_version(self) -> str:
"""
Version of the target server
"""
return pulumi.get(self, "target_server_version")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Validation errors associated with the task
"""
return pulumi.get(self, "validation_errors")
@pulumi.output_type
class ConnectToTargetAzureDbForPostgreSqlSyncTaskPropertiesResponse(dict):
"""
Properties for the task that validates connection to Azure Database For PostgreSQL server and target server requirements for online migration
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetAzureDbForPostgreSqlSyncTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetAzureDbForPostgreSqlSyncTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetAzureDbForPostgreSqlSyncTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence['outputs.ConnectToTargetAzureDbForPostgreSqlSyncTaskOutputResponse'],
state: str,
task_type: str,
input: Optional['outputs.ConnectToTargetAzureDbForPostgreSqlSyncTaskInputResponse'] = None):
"""
Properties for the task that validates connection to Azure Database For PostgreSQL server and target server requirements for online migration
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence['ConnectToTargetAzureDbForPostgreSqlSyncTaskOutputResponse'] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'ConnectToTarget.AzureDbForPostgreSql.Sync'.
:param 'ConnectToTargetAzureDbForPostgreSqlSyncTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'ConnectToTarget.AzureDbForPostgreSql.Sync')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence['outputs.ConnectToTargetAzureDbForPostgreSqlSyncTaskOutputResponse']:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'ConnectToTarget.AzureDbForPostgreSql.Sync'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.ConnectToTargetAzureDbForPostgreSqlSyncTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class ConnectToTargetSqlDbTaskInputResponse(dict):
"""
Input for the task that validates connection to SQL DB and target server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "targetConnectionInfo":
suggest = "target_connection_info"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetSqlDbTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetSqlDbTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetSqlDbTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
target_connection_info: 'outputs.SqlConnectionInfoResponse'):
"""
Input for the task that validates connection to SQL DB and target server requirements
:param 'SqlConnectionInfoResponse' target_connection_info: Connection information for target SQL DB
"""
pulumi.set(__self__, "target_connection_info", target_connection_info)
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Connection information for target SQL DB
"""
return pulumi.get(self, "target_connection_info")
@pulumi.output_type
class ConnectToTargetSqlDbTaskOutputResponse(dict):
"""
Output for the task that validates connection to SQL DB and target server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "targetServerBrandVersion":
suggest = "target_server_brand_version"
elif key == "targetServerVersion":
suggest = "target_server_version"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetSqlDbTaskOutputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetSqlDbTaskOutputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetSqlDbTaskOutputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
databases: Mapping[str, str],
id: str,
target_server_brand_version: str,
target_server_version: str):
"""
Output for the task that validates connection to SQL DB and target server requirements
:param Mapping[str, str] databases: Source databases as a map from database name to database id
:param str id: Result identifier
:param str target_server_brand_version: Target server brand version
:param str target_server_version: Version of the target server
"""
pulumi.set(__self__, "databases", databases)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "target_server_brand_version", target_server_brand_version)
pulumi.set(__self__, "target_server_version", target_server_version)
@property
@pulumi.getter
def databases(self) -> Mapping[str, str]:
"""
Source databases as a map from database name to database id
"""
return pulumi.get(self, "databases")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="targetServerBrandVersion")
def target_server_brand_version(self) -> str:
"""
Target server brand version
"""
return pulumi.get(self, "target_server_brand_version")
@property
@pulumi.getter(name="targetServerVersion")
def target_server_version(self) -> str:
"""
Version of the target server
"""
return pulumi.get(self, "target_server_version")
@pulumi.output_type
class ConnectToTargetSqlDbTaskPropertiesResponse(dict):
"""
Properties for the task that validates connection to SQL DB and target server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetSqlDbTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetSqlDbTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetSqlDbTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence['outputs.ConnectToTargetSqlDbTaskOutputResponse'],
state: str,
task_type: str,
input: Optional['outputs.ConnectToTargetSqlDbTaskInputResponse'] = None):
"""
Properties for the task that validates connection to SQL DB and target server requirements
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence['ConnectToTargetSqlDbTaskOutputResponse'] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'ConnectToTarget.SqlDb'.
:param 'ConnectToTargetSqlDbTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'ConnectToTarget.SqlDb')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence['outputs.ConnectToTargetSqlDbTaskOutputResponse']:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'ConnectToTarget.SqlDb'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.ConnectToTargetSqlDbTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class ConnectToTargetSqlMISyncTaskInputResponse(dict):
"""
Input for the task that validates connection to Azure SQL Database Managed Instance online scenario.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "azureApp":
suggest = "azure_app"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetSqlMISyncTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetSqlMISyncTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetSqlMISyncTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
azure_app: 'outputs.AzureActiveDirectoryAppResponse',
target_connection_info: 'outputs.MiSqlConnectionInfoResponse'):
"""
Input for the task that validates connection to Azure SQL Database Managed Instance online scenario.
:param 'AzureActiveDirectoryAppResponse' azure_app: Azure Active Directory Application the DMS instance will use to connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage Account
:param 'MiSqlConnectionInfoResponse' target_connection_info: Connection information for Azure SQL Database Managed Instance
"""
pulumi.set(__self__, "azure_app", azure_app)
pulumi.set(__self__, "target_connection_info", target_connection_info)
@property
@pulumi.getter(name="azureApp")
def azure_app(self) -> 'outputs.AzureActiveDirectoryAppResponse':
"""
Azure Active Directory Application the DMS instance will use to connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage Account
"""
return pulumi.get(self, "azure_app")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.MiSqlConnectionInfoResponse':
"""
Connection information for Azure SQL Database Managed Instance
"""
return pulumi.get(self, "target_connection_info")
@pulumi.output_type
class ConnectToTargetSqlMISyncTaskOutputResponse(dict):
"""
Output for the task that validates connection to Azure SQL Database Managed Instance.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "targetServerBrandVersion":
suggest = "target_server_brand_version"
elif key == "targetServerVersion":
suggest = "target_server_version"
elif key == "validationErrors":
suggest = "validation_errors"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetSqlMISyncTaskOutputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetSqlMISyncTaskOutputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetSqlMISyncTaskOutputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
target_server_brand_version: str,
target_server_version: str,
validation_errors: Sequence['outputs.ReportableExceptionResponse']):
"""
Output for the task that validates connection to Azure SQL Database Managed Instance.
:param str target_server_brand_version: Target server brand version
:param str target_server_version: Target server version
:param Sequence['ReportableExceptionResponse'] validation_errors: Validation errors
"""
pulumi.set(__self__, "target_server_brand_version", target_server_brand_version)
pulumi.set(__self__, "target_server_version", target_server_version)
pulumi.set(__self__, "validation_errors", validation_errors)
@property
@pulumi.getter(name="targetServerBrandVersion")
def target_server_brand_version(self) -> str:
"""
Target server brand version
"""
return pulumi.get(self, "target_server_brand_version")
@property
@pulumi.getter(name="targetServerVersion")
def target_server_version(self) -> str:
"""
Target server version
"""
return pulumi.get(self, "target_server_version")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Validation errors
"""
return pulumi.get(self, "validation_errors")
@pulumi.output_type
class ConnectToTargetSqlMISyncTaskPropertiesResponse(dict):
"""
Properties for the task that validates connection to Azure SQL Database Managed Instance
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetSqlMISyncTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetSqlMISyncTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetSqlMISyncTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence['outputs.ConnectToTargetSqlMISyncTaskOutputResponse'],
state: str,
task_type: str,
input: Optional['outputs.ConnectToTargetSqlMISyncTaskInputResponse'] = None):
"""
Properties for the task that validates connection to Azure SQL Database Managed Instance
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence['ConnectToTargetSqlMISyncTaskOutputResponse'] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'ConnectToTarget.AzureSqlDbMI.Sync.LRS'.
:param 'ConnectToTargetSqlMISyncTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'ConnectToTarget.AzureSqlDbMI.Sync.LRS')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence['outputs.ConnectToTargetSqlMISyncTaskOutputResponse']:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'ConnectToTarget.AzureSqlDbMI.Sync.LRS'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.ConnectToTargetSqlMISyncTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class ConnectToTargetSqlMITaskInputResponse(dict):
"""
Input for the task that validates connection to Azure SQL Database Managed Instance.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "targetConnectionInfo":
suggest = "target_connection_info"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetSqlMITaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetSqlMITaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetSqlMITaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
target_connection_info: 'outputs.SqlConnectionInfoResponse'):
"""
Input for the task that validates connection to Azure SQL Database Managed Instance.
:param 'SqlConnectionInfoResponse' target_connection_info: Connection information for target SQL Server
"""
pulumi.set(__self__, "target_connection_info", target_connection_info)
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Connection information for target SQL Server
"""
return pulumi.get(self, "target_connection_info")
@pulumi.output_type
class ConnectToTargetSqlMITaskOutputResponse(dict):
"""
Output for the task that validates connection to Azure SQL Database Managed Instance.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "agentJobs":
suggest = "agent_jobs"
elif key == "targetServerBrandVersion":
suggest = "target_server_brand_version"
elif key == "targetServerVersion":
suggest = "target_server_version"
elif key == "validationErrors":
suggest = "validation_errors"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetSqlMITaskOutputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetSqlMITaskOutputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetSqlMITaskOutputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
agent_jobs: Sequence[str],
id: str,
logins: Sequence[str],
target_server_brand_version: str,
target_server_version: str,
validation_errors: Sequence['outputs.ReportableExceptionResponse']):
"""
Output for the task that validates connection to Azure SQL Database Managed Instance.
:param Sequence[str] agent_jobs: List of agent jobs on the target server.
:param str id: Result identifier
:param Sequence[str] logins: List of logins on the target server.
:param str target_server_brand_version: Target server brand version
:param str target_server_version: Target server version
:param Sequence['ReportableExceptionResponse'] validation_errors: Validation errors
"""
pulumi.set(__self__, "agent_jobs", agent_jobs)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "logins", logins)
pulumi.set(__self__, "target_server_brand_version", target_server_brand_version)
pulumi.set(__self__, "target_server_version", target_server_version)
pulumi.set(__self__, "validation_errors", validation_errors)
@property
@pulumi.getter(name="agentJobs")
def agent_jobs(self) -> Sequence[str]:
"""
List of agent jobs on the target server.
"""
return pulumi.get(self, "agent_jobs")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def logins(self) -> Sequence[str]:
"""
List of logins on the target server.
"""
return pulumi.get(self, "logins")
@property
@pulumi.getter(name="targetServerBrandVersion")
def target_server_brand_version(self) -> str:
"""
Target server brand version
"""
return pulumi.get(self, "target_server_brand_version")
@property
@pulumi.getter(name="targetServerVersion")
def target_server_version(self) -> str:
"""
Target server version
"""
return pulumi.get(self, "target_server_version")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Validation errors
"""
return pulumi.get(self, "validation_errors")
@pulumi.output_type
class ConnectToTargetSqlMITaskPropertiesResponse(dict):
"""
Properties for the task that validates connection to Azure SQL Database Managed Instance
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetSqlMITaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetSqlMITaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetSqlMITaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence['outputs.ConnectToTargetSqlMITaskOutputResponse'],
state: str,
task_type: str,
input: Optional['outputs.ConnectToTargetSqlMITaskInputResponse'] = None):
"""
Properties for the task that validates connection to Azure SQL Database Managed Instance
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence['ConnectToTargetSqlMITaskOutputResponse'] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'ConnectToTarget.AzureSqlDbMI'.
:param 'ConnectToTargetSqlMITaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'ConnectToTarget.AzureSqlDbMI')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence['outputs.ConnectToTargetSqlMITaskOutputResponse']:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'ConnectToTarget.AzureSqlDbMI'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.ConnectToTargetSqlMITaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class ConnectToTargetSqlSqlDbSyncTaskInputResponse(dict):
"""
Input for the task that validates connection to Azure SQL DB and target server requirements
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetSqlSqlDbSyncTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetSqlSqlDbSyncTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetSqlSqlDbSyncTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
source_connection_info: 'outputs.SqlConnectionInfoResponse',
target_connection_info: 'outputs.SqlConnectionInfoResponse'):
"""
Input for the task that validates connection to Azure SQL DB and target server requirements
:param 'SqlConnectionInfoResponse' source_connection_info: Connection information for source SQL Server
:param 'SqlConnectionInfoResponse' target_connection_info: Connection information for target SQL DB
"""
pulumi.set(__self__, "source_connection_info", source_connection_info)
pulumi.set(__self__, "target_connection_info", target_connection_info)
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Connection information for source SQL Server
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Connection information for target SQL DB
"""
return pulumi.get(self, "target_connection_info")
@pulumi.output_type
class ConnectToTargetSqlSqlDbSyncTaskPropertiesResponse(dict):
"""
Properties for the task that validates connection to SQL DB and target server requirements for online migration
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ConnectToTargetSqlSqlDbSyncTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ConnectToTargetSqlSqlDbSyncTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ConnectToTargetSqlSqlDbSyncTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence['outputs.ConnectToTargetSqlDbTaskOutputResponse'],
state: str,
task_type: str,
input: Optional['outputs.ConnectToTargetSqlSqlDbSyncTaskInputResponse'] = None):
"""
Properties for the task that validates connection to SQL DB and target server requirements for online migration
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence['ConnectToTargetSqlDbTaskOutputResponse'] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'ConnectToTarget.SqlDb.Sync'.
:param 'ConnectToTargetSqlSqlDbSyncTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'ConnectToTarget.SqlDb.Sync')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence['outputs.ConnectToTargetSqlDbTaskOutputResponse']:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'ConnectToTarget.SqlDb.Sync'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.ConnectToTargetSqlSqlDbSyncTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class DataIntegrityValidationResultResponse(dict):
"""
Results for checksum based Data Integrity validation results
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "failedObjects":
suggest = "failed_objects"
elif key == "validationErrors":
suggest = "validation_errors"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DataIntegrityValidationResultResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DataIntegrityValidationResultResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DataIntegrityValidationResultResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
failed_objects: Mapping[str, str],
validation_errors: 'outputs.ValidationErrorResponse'):
"""
Results for checksum based Data Integrity validation results
:param Mapping[str, str] failed_objects: List of failed table names of source and target pair
:param 'ValidationErrorResponse' validation_errors: List of errors that happened while performing data integrity validation
"""
pulumi.set(__self__, "failed_objects", failed_objects)
pulumi.set(__self__, "validation_errors", validation_errors)
@property
@pulumi.getter(name="failedObjects")
def failed_objects(self) -> Mapping[str, str]:
"""
List of failed table names of source and target pair
"""
return pulumi.get(self, "failed_objects")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> 'outputs.ValidationErrorResponse':
"""
List of errors that happened while performing data integrity validation
"""
return pulumi.get(self, "validation_errors")
@pulumi.output_type
class DataItemMigrationSummaryResultResponse(dict):
"""
Basic summary of a data item migration
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "endedOn":
suggest = "ended_on"
elif key == "errorPrefix":
suggest = "error_prefix"
elif key == "itemsCompletedCount":
suggest = "items_completed_count"
elif key == "itemsCount":
suggest = "items_count"
elif key == "resultPrefix":
suggest = "result_prefix"
elif key == "startedOn":
suggest = "started_on"
elif key == "statusMessage":
suggest = "status_message"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DataItemMigrationSummaryResultResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DataItemMigrationSummaryResultResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DataItemMigrationSummaryResultResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ended_on: str,
error_prefix: str,
items_completed_count: float,
items_count: float,
name: str,
result_prefix: str,
started_on: str,
state: str,
status_message: str):
"""
Basic summary of a data item migration
:param str ended_on: Migration end time
:param str error_prefix: Wildcard string prefix to use for querying all errors of the item
:param float items_completed_count: Number of successfully completed items
:param float items_count: Number of items
:param str name: Name of the item
        :param str result_prefix: Wildcard string prefix to use for querying all sub-item results of the item
:param str started_on: Migration start time
:param str state: Current state of migration
:param str status_message: Status message
"""
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "error_prefix", error_prefix)
pulumi.set(__self__, "items_completed_count", items_completed_count)
pulumi.set(__self__, "items_count", items_count)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "result_prefix", result_prefix)
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "status_message", status_message)
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter(name="errorPrefix")
def error_prefix(self) -> str:
"""
Wildcard string prefix to use for querying all errors of the item
"""
return pulumi.get(self, "error_prefix")
@property
@pulumi.getter(name="itemsCompletedCount")
def items_completed_count(self) -> float:
"""
Number of successfully completed items
"""
return pulumi.get(self, "items_completed_count")
@property
@pulumi.getter(name="itemsCount")
def items_count(self) -> float:
"""
Number of items
"""
return pulumi.get(self, "items_count")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of the item
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resultPrefix")
def result_prefix(self) -> str:
"""
        Wildcard string prefix to use for querying all sub-item results of the item
"""
return pulumi.get(self, "result_prefix")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter
def state(self) -> str:
"""
Current state of migration
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="statusMessage")
def status_message(self) -> str:
"""
Status message
"""
return pulumi.get(self, "status_message")
@pulumi.output_type
class DatabaseBackupInfoResponse(dict):
"""
Information about backup files when existing backup mode is used.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "backupFiles":
suggest = "backup_files"
elif key == "backupFinishDate":
suggest = "backup_finish_date"
elif key == "backupType":
suggest = "backup_type"
elif key == "databaseName":
suggest = "database_name"
elif key == "familyCount":
suggest = "family_count"
elif key == "isCompressed":
suggest = "is_compressed"
elif key == "isDamaged":
suggest = "is_damaged"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DatabaseBackupInfoResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DatabaseBackupInfoResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DatabaseBackupInfoResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
backup_files: Sequence[str],
backup_finish_date: str,
backup_type: str,
database_name: str,
family_count: int,
is_compressed: bool,
is_damaged: bool,
position: int):
"""
Information about backup files when existing backup mode is used.
:param Sequence[str] backup_files: The list of backup files for the current database.
:param str backup_finish_date: Date and time when the backup operation finished.
:param str backup_type: Backup Type.
:param str database_name: Database name.
:param int family_count: Number of files in the backup set.
:param bool is_compressed: Whether the backup set is compressed
:param bool is_damaged: Database was damaged when backed up, but the backup operation was requested to continue despite errors.
:param int position: Position of current database backup in the file.
"""
pulumi.set(__self__, "backup_files", backup_files)
pulumi.set(__self__, "backup_finish_date", backup_finish_date)
pulumi.set(__self__, "backup_type", backup_type)
pulumi.set(__self__, "database_name", database_name)
pulumi.set(__self__, "family_count", family_count)
pulumi.set(__self__, "is_compressed", is_compressed)
pulumi.set(__self__, "is_damaged", is_damaged)
pulumi.set(__self__, "position", position)
@property
@pulumi.getter(name="backupFiles")
def backup_files(self) -> Sequence[str]:
"""
The list of backup files for the current database.
"""
return pulumi.get(self, "backup_files")
@property
@pulumi.getter(name="backupFinishDate")
def backup_finish_date(self) -> str:
"""
Date and time when the backup operation finished.
"""
return pulumi.get(self, "backup_finish_date")
@property
@pulumi.getter(name="backupType")
def backup_type(self) -> str:
"""
Backup Type.
"""
return pulumi.get(self, "backup_type")
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> str:
"""
Database name.
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter(name="familyCount")
def family_count(self) -> int:
"""
Number of files in the backup set.
"""
return pulumi.get(self, "family_count")
@property
@pulumi.getter(name="isCompressed")
def is_compressed(self) -> bool:
"""
Whether the backup set is compressed
"""
return pulumi.get(self, "is_compressed")
@property
@pulumi.getter(name="isDamaged")
def is_damaged(self) -> bool:
"""
Database was damaged when backed up, but the backup operation was requested to continue despite errors.
"""
return pulumi.get(self, "is_damaged")
@property
@pulumi.getter
def position(self) -> int:
"""
Position of current database backup in the file.
"""
return pulumi.get(self, "position")
@pulumi.output_type
class DatabaseFileInfoResponse(dict):
"""
Database file specific information
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "databaseName":
suggest = "database_name"
elif key == "fileType":
suggest = "file_type"
elif key == "logicalName":
suggest = "logical_name"
elif key == "physicalFullName":
suggest = "physical_full_name"
elif key == "restoreFullName":
suggest = "restore_full_name"
elif key == "sizeMB":
suggest = "size_mb"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DatabaseFileInfoResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DatabaseFileInfoResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DatabaseFileInfoResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
database_name: Optional[str] = None,
file_type: Optional[str] = None,
id: Optional[str] = None,
logical_name: Optional[str] = None,
physical_full_name: Optional[str] = None,
restore_full_name: Optional[str] = None,
size_mb: Optional[float] = None):
"""
Database file specific information
:param str database_name: Name of the database
:param str file_type: Database file type
:param str id: Unique identifier for database file
:param str logical_name: Logical name of the file
:param str physical_full_name: Operating-system full path of the file
:param str restore_full_name: Suggested full path of the file for restoring
:param float size_mb: Size of the file in megabytes
"""
if database_name is not None:
pulumi.set(__self__, "database_name", database_name)
if file_type is not None:
pulumi.set(__self__, "file_type", file_type)
if id is not None:
pulumi.set(__self__, "id", id)
if logical_name is not None:
pulumi.set(__self__, "logical_name", logical_name)
if physical_full_name is not None:
pulumi.set(__self__, "physical_full_name", physical_full_name)
if restore_full_name is not None:
pulumi.set(__self__, "restore_full_name", restore_full_name)
if size_mb is not None:
pulumi.set(__self__, "size_mb", size_mb)
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> Optional[str]:
"""
Name of the database
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter(name="fileType")
def file_type(self) -> Optional[str]:
"""
Database file type
"""
return pulumi.get(self, "file_type")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Unique identifier for database file
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="logicalName")
def logical_name(self) -> Optional[str]:
"""
Logical name of the file
"""
return pulumi.get(self, "logical_name")
@property
@pulumi.getter(name="physicalFullName")
def physical_full_name(self) -> Optional[str]:
"""
Operating-system full path of the file
"""
return pulumi.get(self, "physical_full_name")
@property
@pulumi.getter(name="restoreFullName")
def restore_full_name(self) -> Optional[str]:
"""
Suggested full path of the file for restoring
"""
return pulumi.get(self, "restore_full_name")
@property
@pulumi.getter(name="sizeMB")
def size_mb(self) -> Optional[float]:
"""
Size of the file in megabytes
"""
return pulumi.get(self, "size_mb")
@pulumi.output_type
class DatabaseInfoResponse(dict):
"""
Project Database Details
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sourceDatabaseName":
suggest = "source_database_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DatabaseInfoResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DatabaseInfoResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DatabaseInfoResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
source_database_name: str):
"""
Project Database Details
:param str source_database_name: Name of the database
"""
pulumi.set(__self__, "source_database_name", source_database_name)
@property
@pulumi.getter(name="sourceDatabaseName")
def source_database_name(self) -> str:
"""
Name of the database
"""
return pulumi.get(self, "source_database_name")
@pulumi.output_type
class DatabaseSummaryResultResponse(dict):
"""
Summary of database results in the migration
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "endedOn":
suggest = "ended_on"
elif key == "errorPrefix":
suggest = "error_prefix"
elif key == "itemsCompletedCount":
suggest = "items_completed_count"
elif key == "itemsCount":
suggest = "items_count"
elif key == "resultPrefix":
suggest = "result_prefix"
elif key == "sizeMB":
suggest = "size_mb"
elif key == "startedOn":
suggest = "started_on"
elif key == "statusMessage":
suggest = "status_message"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DatabaseSummaryResultResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DatabaseSummaryResultResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DatabaseSummaryResultResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ended_on: str,
error_prefix: str,
items_completed_count: float,
items_count: float,
name: str,
result_prefix: str,
size_mb: float,
started_on: str,
state: str,
status_message: str):
"""
Summary of database results in the migration
:param str ended_on: Migration end time
:param str error_prefix: Wildcard string prefix to use for querying all errors of the item
:param float items_completed_count: Number of successfully completed items
:param float items_count: Number of items
:param str name: Name of the item
        :param str result_prefix: Wildcard string prefix to use for querying all sub-item results of the item
:param float size_mb: Size of the database in megabytes
:param str started_on: Migration start time
:param str state: Current state of migration
:param str status_message: Status message
"""
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "error_prefix", error_prefix)
pulumi.set(__self__, "items_completed_count", items_completed_count)
pulumi.set(__self__, "items_count", items_count)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "result_prefix", result_prefix)
pulumi.set(__self__, "size_mb", size_mb)
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "status_message", status_message)
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter(name="errorPrefix")
def error_prefix(self) -> str:
"""
Wildcard string prefix to use for querying all errors of the item
"""
return pulumi.get(self, "error_prefix")
@property
@pulumi.getter(name="itemsCompletedCount")
def items_completed_count(self) -> float:
"""
Number of successfully completed items
"""
return pulumi.get(self, "items_completed_count")
@property
@pulumi.getter(name="itemsCount")
def items_count(self) -> float:
"""
Number of items
"""
return pulumi.get(self, "items_count")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of the item
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resultPrefix")
def result_prefix(self) -> str:
"""
        Wildcard string prefix to use for querying all sub-item results of the item
"""
return pulumi.get(self, "result_prefix")
@property
@pulumi.getter(name="sizeMB")
def size_mb(self) -> float:
"""
Size of the database in megabytes
"""
return pulumi.get(self, "size_mb")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter
def state(self) -> str:
"""
Current state of migration
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="statusMessage")
def status_message(self) -> str:
"""
Status message
"""
return pulumi.get(self, "status_message")
@pulumi.output_type
class DatabaseTableResponse(dict):
"""
Table properties
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "hasRows":
suggest = "has_rows"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in DatabaseTableResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
DatabaseTableResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
DatabaseTableResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
has_rows: bool,
name: str):
"""
Table properties
        :param bool has_rows: Indicates whether the table is empty or not
:param str name: Schema-qualified name of the table
"""
pulumi.set(__self__, "has_rows", has_rows)
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="hasRows")
def has_rows(self) -> bool:
"""
        Indicates whether the table is empty or not
"""
return pulumi.get(self, "has_rows")
@property
@pulumi.getter
def name(self) -> str:
"""
Schema-qualified name of the table
"""
return pulumi.get(self, "name")
@pulumi.output_type
class ExecutionStatisticsResponse(dict):
"""
    Description about the errors that happen while performing migration validation
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "cpuTimeMs":
suggest = "cpu_time_ms"
elif key == "elapsedTimeMs":
suggest = "elapsed_time_ms"
elif key == "executionCount":
suggest = "execution_count"
elif key == "hasErrors":
suggest = "has_errors"
elif key == "sqlErrors":
suggest = "sql_errors"
elif key == "waitStats":
suggest = "wait_stats"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ExecutionStatisticsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ExecutionStatisticsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ExecutionStatisticsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cpu_time_ms: float,
elapsed_time_ms: float,
execution_count: float,
has_errors: bool,
sql_errors: Sequence[str],
wait_stats: Optional[Mapping[str, 'outputs.WaitStatisticsResponse']] = None):
"""
        Description about the errors that happen while performing migration validation
:param float cpu_time_ms: CPU Time in millisecond(s) for the query execution
:param float elapsed_time_ms: Time taken in millisecond(s) for executing the query
        :param float execution_count: Number of query executions
:param bool has_errors: Indicates whether the query resulted in an error
        :param Sequence[str] sql_errors: List of SQL errors
        :param Mapping[str, 'WaitStatisticsResponse'] wait_stats: Dictionary of SQL query execution wait types and the respective statistics
"""
pulumi.set(__self__, "cpu_time_ms", cpu_time_ms)
pulumi.set(__self__, "elapsed_time_ms", elapsed_time_ms)
pulumi.set(__self__, "execution_count", execution_count)
pulumi.set(__self__, "has_errors", has_errors)
pulumi.set(__self__, "sql_errors", sql_errors)
if wait_stats is not None:
pulumi.set(__self__, "wait_stats", wait_stats)
@property
@pulumi.getter(name="cpuTimeMs")
def cpu_time_ms(self) -> float:
"""
CPU Time in millisecond(s) for the query execution
"""
return pulumi.get(self, "cpu_time_ms")
@property
@pulumi.getter(name="elapsedTimeMs")
def elapsed_time_ms(self) -> float:
"""
Time taken in millisecond(s) for executing the query
"""
return pulumi.get(self, "elapsed_time_ms")
@property
@pulumi.getter(name="executionCount")
def execution_count(self) -> float:
"""
        Number of query executions
"""
return pulumi.get(self, "execution_count")
@property
@pulumi.getter(name="hasErrors")
def has_errors(self) -> bool:
"""
Indicates whether the query resulted in an error
"""
return pulumi.get(self, "has_errors")
@property
@pulumi.getter(name="sqlErrors")
def sql_errors(self) -> Sequence[str]:
"""
        List of SQL errors
"""
return pulumi.get(self, "sql_errors")
@property
@pulumi.getter(name="waitStats")
def wait_stats(self) -> Optional[Mapping[str, 'outputs.WaitStatisticsResponse']]:
"""
        Dictionary of SQL query execution wait types and the respective statistics
"""
return pulumi.get(self, "wait_stats")
@pulumi.output_type
class FileShareResponse(dict):
"""
File share information with Path, Username, and Password.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "userName":
suggest = "user_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in FileShareResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
FileShareResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
FileShareResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
path: str,
password: Optional[str] = None,
user_name: Optional[str] = None):
"""
File share information with Path, Username, and Password.
:param str path: The folder path for this share.
:param str password: Password credential used to connect to the share location.
:param str user_name: User name credential to connect to the share location
"""
pulumi.set(__self__, "path", path)
if password is not None:
pulumi.set(__self__, "password", password)
if user_name is not None:
pulumi.set(__self__, "user_name", user_name)
@property
@pulumi.getter
def path(self) -> str:
"""
The folder path for this share.
"""
return pulumi.get(self, "path")
@property
@pulumi.getter
def password(self) -> Optional[str]:
"""
Password credential used to connect to the share location.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter(name="userName")
def user_name(self) -> Optional[str]:
"""
User name credential to connect to the share location
"""
return pulumi.get(self, "user_name")
@pulumi.output_type
class GetTdeCertificatesSqlTaskInputResponse(dict):
"""
Input for the task that gets TDE certificates in Base64 encoded format.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "backupFileShare":
suggest = "backup_file_share"
elif key == "connectionInfo":
suggest = "connection_info"
elif key == "selectedCertificates":
suggest = "selected_certificates"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GetTdeCertificatesSqlTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GetTdeCertificatesSqlTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GetTdeCertificatesSqlTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
backup_file_share: 'outputs.FileShareResponse',
connection_info: 'outputs.SqlConnectionInfoResponse',
selected_certificates: Sequence['outputs.SelectedCertificateInputResponse']):
"""
Input for the task that gets TDE certificates in Base64 encoded format.
:param 'FileShareResponse' backup_file_share: Backup file share information for file share to be used for temporarily storing files.
:param 'SqlConnectionInfoResponse' connection_info: Connection information for SQL Server
        :param Sequence['SelectedCertificateInputResponse'] selected_certificates: List containing certificate names and the corresponding passwords to use for encrypting the exported certificates.
"""
pulumi.set(__self__, "backup_file_share", backup_file_share)
pulumi.set(__self__, "connection_info", connection_info)
pulumi.set(__self__, "selected_certificates", selected_certificates)
@property
@pulumi.getter(name="backupFileShare")
def backup_file_share(self) -> 'outputs.FileShareResponse':
"""
Backup file share information for file share to be used for temporarily storing files.
"""
return pulumi.get(self, "backup_file_share")
@property
@pulumi.getter(name="connectionInfo")
def connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Connection information for SQL Server
"""
return pulumi.get(self, "connection_info")
@property
@pulumi.getter(name="selectedCertificates")
def selected_certificates(self) -> Sequence['outputs.SelectedCertificateInputResponse']:
"""
List containing certificate names and corresponding password to use for encrypting the exported certificate.
"""
return pulumi.get(self, "selected_certificates")
@pulumi.output_type
class GetTdeCertificatesSqlTaskOutputResponse(dict):
"""
Output of the task that gets TDE certificates in Base64 encoded format.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "base64EncodedCertificates":
suggest = "base64_encoded_certificates"
elif key == "validationErrors":
suggest = "validation_errors"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GetTdeCertificatesSqlTaskOutputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GetTdeCertificatesSqlTaskOutputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GetTdeCertificatesSqlTaskOutputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
base64_encoded_certificates: Mapping[str, Sequence[str]],
validation_errors: Sequence['outputs.ReportableExceptionResponse']):
"""
Output of the task that gets TDE certificates in Base64 encoded format.
        :param Mapping[str, Sequence[str]] base64_encoded_certificates: Mapping from certificate name to Base64 encoded format.
:param Sequence['ReportableExceptionResponse'] validation_errors: Validation errors
"""
pulumi.set(__self__, "base64_encoded_certificates", base64_encoded_certificates)
pulumi.set(__self__, "validation_errors", validation_errors)
@property
@pulumi.getter(name="base64EncodedCertificates")
def base64_encoded_certificates(self) -> Mapping[str, Sequence[str]]:
"""
        Mapping from certificate name to Base64 encoded format.
"""
return pulumi.get(self, "base64_encoded_certificates")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Validation errors
"""
return pulumi.get(self, "validation_errors")
@pulumi.output_type
class GetTdeCertificatesSqlTaskPropertiesResponse(dict):
"""
Properties for the task that gets TDE certificates in Base64 encoded format.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GetTdeCertificatesSqlTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GetTdeCertificatesSqlTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GetTdeCertificatesSqlTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence['outputs.GetTdeCertificatesSqlTaskOutputResponse'],
state: str,
task_type: str,
input: Optional['outputs.GetTdeCertificatesSqlTaskInputResponse'] = None):
"""
Properties for the task that gets TDE certificates in Base64 encoded format.
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence['GetTdeCertificatesSqlTaskOutputResponse'] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'GetTDECertificates.Sql'.
:param 'GetTdeCertificatesSqlTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'GetTDECertificates.Sql')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence['outputs.GetTdeCertificatesSqlTaskOutputResponse']:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'GetTDECertificates.Sql'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.GetTdeCertificatesSqlTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class GetUserTablesSqlSyncTaskInputResponse(dict):
"""
Input for the task that collects user tables for the given list of databases
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "selectedSourceDatabases":
suggest = "selected_source_databases"
elif key == "selectedTargetDatabases":
suggest = "selected_target_databases"
elif key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GetUserTablesSqlSyncTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GetUserTablesSqlSyncTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GetUserTablesSqlSyncTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
selected_source_databases: Sequence[str],
selected_target_databases: Sequence[str],
source_connection_info: 'outputs.SqlConnectionInfoResponse',
target_connection_info: 'outputs.SqlConnectionInfoResponse'):
"""
Input for the task that collects user tables for the given list of databases
:param Sequence[str] selected_source_databases: List of source database names to collect tables for
:param Sequence[str] selected_target_databases: List of target database names to collect tables for
:param 'SqlConnectionInfoResponse' source_connection_info: Connection information for SQL Server
:param 'SqlConnectionInfoResponse' target_connection_info: Connection information for SQL DB
"""
pulumi.set(__self__, "selected_source_databases", selected_source_databases)
pulumi.set(__self__, "selected_target_databases", selected_target_databases)
pulumi.set(__self__, "source_connection_info", source_connection_info)
pulumi.set(__self__, "target_connection_info", target_connection_info)
@property
@pulumi.getter(name="selectedSourceDatabases")
def selected_source_databases(self) -> Sequence[str]:
"""
List of source database names to collect tables for
"""
return pulumi.get(self, "selected_source_databases")
@property
@pulumi.getter(name="selectedTargetDatabases")
def selected_target_databases(self) -> Sequence[str]:
"""
List of target database names to collect tables for
"""
return pulumi.get(self, "selected_target_databases")
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Connection information for SQL Server
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Connection information for SQL DB
"""
return pulumi.get(self, "target_connection_info")
@pulumi.output_type
class GetUserTablesSqlSyncTaskOutputResponse(dict):
"""
Output of the task that collects user tables for the given list of databases
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "databasesToSourceTables":
suggest = "databases_to_source_tables"
elif key == "databasesToTargetTables":
suggest = "databases_to_target_tables"
elif key == "tableValidationErrors":
suggest = "table_validation_errors"
elif key == "validationErrors":
suggest = "validation_errors"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GetUserTablesSqlSyncTaskOutputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GetUserTablesSqlSyncTaskOutputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GetUserTablesSqlSyncTaskOutputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
databases_to_source_tables: Mapping[str, Sequence['outputs.DatabaseTableResponse']],
databases_to_target_tables: Mapping[str, Sequence['outputs.DatabaseTableResponse']],
table_validation_errors: Mapping[str, Sequence[str]],
validation_errors: Sequence['outputs.ReportableExceptionResponse']):
"""
Output of the task that collects user tables for the given list of databases
:param Mapping[str, Sequence['DatabaseTableResponse']] databases_to_source_tables: Mapping from database name to list of source tables
:param Mapping[str, Sequence['DatabaseTableResponse']] databases_to_target_tables: Mapping from database name to list of target tables
:param Mapping[str, Sequence[str]] table_validation_errors: Mapping from database name to list of validation errors
:param Sequence['ReportableExceptionResponse'] validation_errors: Validation errors
"""
pulumi.set(__self__, "databases_to_source_tables", databases_to_source_tables)
pulumi.set(__self__, "databases_to_target_tables", databases_to_target_tables)
pulumi.set(__self__, "table_validation_errors", table_validation_errors)
pulumi.set(__self__, "validation_errors", validation_errors)
@property
@pulumi.getter(name="databasesToSourceTables")
def databases_to_source_tables(self) -> Mapping[str, Sequence['outputs.DatabaseTableResponse']]:
"""
Mapping from database name to list of source tables
"""
return pulumi.get(self, "databases_to_source_tables")
@property
@pulumi.getter(name="databasesToTargetTables")
def databases_to_target_tables(self) -> Mapping[str, Sequence['outputs.DatabaseTableResponse']]:
"""
Mapping from database name to list of target tables
"""
return pulumi.get(self, "databases_to_target_tables")
@property
@pulumi.getter(name="tableValidationErrors")
def table_validation_errors(self) -> Mapping[str, Sequence[str]]:
"""
Mapping from database name to list of validation errors
"""
return pulumi.get(self, "table_validation_errors")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Validation errors
"""
return pulumi.get(self, "validation_errors")
@pulumi.output_type
class GetUserTablesSqlSyncTaskPropertiesResponse(dict):
"""
Properties for the task that collects user tables for the given list of databases
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GetUserTablesSqlSyncTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GetUserTablesSqlSyncTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GetUserTablesSqlSyncTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence['outputs.GetUserTablesSqlSyncTaskOutputResponse'],
state: str,
task_type: str,
input: Optional['outputs.GetUserTablesSqlSyncTaskInputResponse'] = None):
"""
Properties for the task that collects user tables for the given list of databases
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence['GetUserTablesSqlSyncTaskOutputResponse'] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'GetUserTables.AzureSqlDb.Sync'.
:param 'GetUserTablesSqlSyncTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'GetUserTables.AzureSqlDb.Sync')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence['outputs.GetUserTablesSqlSyncTaskOutputResponse']:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'GetUserTables.AzureSqlDb.Sync'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.GetUserTablesSqlSyncTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class GetUserTablesSqlTaskInputResponse(dict):
"""
Input for the task that collects user tables for the given list of databases
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "connectionInfo":
suggest = "connection_info"
elif key == "selectedDatabases":
suggest = "selected_databases"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GetUserTablesSqlTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GetUserTablesSqlTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GetUserTablesSqlTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
connection_info: 'outputs.SqlConnectionInfoResponse',
selected_databases: Sequence[str]):
"""
Input for the task that collects user tables for the given list of databases
:param 'SqlConnectionInfoResponse' connection_info: Connection information for SQL Server
:param Sequence[str] selected_databases: List of database names to collect tables for
"""
pulumi.set(__self__, "connection_info", connection_info)
pulumi.set(__self__, "selected_databases", selected_databases)
@property
@pulumi.getter(name="connectionInfo")
def connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Connection information for SQL Server
"""
return pulumi.get(self, "connection_info")
@property
@pulumi.getter(name="selectedDatabases")
def selected_databases(self) -> Sequence[str]:
"""
List of database names to collect tables for
"""
return pulumi.get(self, "selected_databases")
@pulumi.output_type
class GetUserTablesSqlTaskOutputResponse(dict):
"""
Output of the task that collects user tables for the given list of databases
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "databasesToTables":
suggest = "databases_to_tables"
elif key == "validationErrors":
suggest = "validation_errors"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GetUserTablesSqlTaskOutputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GetUserTablesSqlTaskOutputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GetUserTablesSqlTaskOutputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
databases_to_tables: Mapping[str, Sequence['outputs.DatabaseTableResponse']],
id: str,
validation_errors: Sequence['outputs.ReportableExceptionResponse']):
"""
Output of the task that collects user tables for the given list of databases
:param Mapping[str, Sequence['DatabaseTableResponse']] databases_to_tables: Mapping from database name to list of tables
:param str id: Result identifier
:param Sequence['ReportableExceptionResponse'] validation_errors: Validation errors
"""
pulumi.set(__self__, "databases_to_tables", databases_to_tables)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "validation_errors", validation_errors)
@property
@pulumi.getter(name="databasesToTables")
def databases_to_tables(self) -> Mapping[str, Sequence['outputs.DatabaseTableResponse']]:
"""
Mapping from database name to list of tables
"""
return pulumi.get(self, "databases_to_tables")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Validation errors
"""
return pulumi.get(self, "validation_errors")
@pulumi.output_type
class GetUserTablesSqlTaskPropertiesResponse(dict):
"""
Properties for the task that collects user tables for the given list of databases
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in GetUserTablesSqlTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
GetUserTablesSqlTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
GetUserTablesSqlTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence['outputs.GetUserTablesSqlTaskOutputResponse'],
state: str,
task_type: str,
input: Optional['outputs.GetUserTablesSqlTaskInputResponse'] = None):
"""
Properties for the task that collects user tables for the given list of databases
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence['GetUserTablesSqlTaskOutputResponse'] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'GetUserTables.Sql'.
:param 'GetUserTablesSqlTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'GetUserTables.Sql')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence['outputs.GetUserTablesSqlTaskOutputResponse']:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'GetUserTables.Sql'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.GetUserTablesSqlTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class MiSqlConnectionInfoResponse(dict):
"""
    Properties required to create a connection to Azure SQL Database Managed Instance
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "managedInstanceResourceId":
suggest = "managed_instance_resource_id"
elif key == "userName":
suggest = "user_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MiSqlConnectionInfoResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MiSqlConnectionInfoResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MiSqlConnectionInfoResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
managed_instance_resource_id: str,
type: str,
password: Optional[str] = None,
user_name: Optional[str] = None):
"""
        Properties required to create a connection to Azure SQL Database Managed Instance
        :param str managed_instance_resource_id: Resource ID for the Azure SQL Database Managed Instance
:param str type: Type of connection info
Expected value is 'MiSqlConnectionInfo'.
:param str password: Password credential.
:param str user_name: User name
"""
pulumi.set(__self__, "managed_instance_resource_id", managed_instance_resource_id)
pulumi.set(__self__, "type", 'MiSqlConnectionInfo')
if password is not None:
pulumi.set(__self__, "password", password)
if user_name is not None:
pulumi.set(__self__, "user_name", user_name)
@property
@pulumi.getter(name="managedInstanceResourceId")
def managed_instance_resource_id(self) -> str:
"""
        Resource ID for the Azure SQL Database Managed Instance
"""
return pulumi.get(self, "managed_instance_resource_id")
@property
@pulumi.getter
def type(self) -> str:
"""
Type of connection info
Expected value is 'MiSqlConnectionInfo'.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def password(self) -> Optional[str]:
"""
Password credential.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter(name="userName")
def user_name(self) -> Optional[str]:
"""
User name
"""
return pulumi.get(self, "user_name")
@pulumi.output_type
class MigrateMISyncCompleteCommandInputResponse(dict):
"""
Input for command that completes online migration for an Azure SQL Database Managed Instance.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sourceDatabaseName":
suggest = "source_database_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateMISyncCompleteCommandInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateMISyncCompleteCommandInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateMISyncCompleteCommandInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
source_database_name: str):
"""
Input for command that completes online migration for an Azure SQL Database Managed Instance.
:param str source_database_name: Name of managed instance database
"""
pulumi.set(__self__, "source_database_name", source_database_name)
@property
@pulumi.getter(name="sourceDatabaseName")
def source_database_name(self) -> str:
"""
Name of managed instance database
"""
return pulumi.get(self, "source_database_name")
@pulumi.output_type
class MigrateMISyncCompleteCommandOutputResponse(dict):
"""
Output for command that completes online migration for an Azure SQL Database Managed Instance.
"""
def __init__(__self__, *,
errors: Optional[Sequence['outputs.ReportableExceptionResponse']] = None):
"""
Output for command that completes online migration for an Azure SQL Database Managed Instance.
:param Sequence['ReportableExceptionResponse'] errors: List of errors that happened during the command execution
"""
if errors is not None:
pulumi.set(__self__, "errors", errors)
@property
@pulumi.getter
def errors(self) -> Optional[Sequence['outputs.ReportableExceptionResponse']]:
"""
List of errors that happened during the command execution
"""
return pulumi.get(self, "errors")
@pulumi.output_type
class MigrateMISyncCompleteCommandPropertiesResponse(dict):
"""
Properties for the command that completes online migration for an Azure SQL Database Managed Instance.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "commandType":
suggest = "command_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateMISyncCompleteCommandPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateMISyncCompleteCommandPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateMISyncCompleteCommandPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
command_type: str,
errors: Sequence['outputs.ODataErrorResponse'],
output: 'outputs.MigrateMISyncCompleteCommandOutputResponse',
state: str,
input: Optional['outputs.MigrateMISyncCompleteCommandInputResponse'] = None):
"""
Properties for the command that completes online migration for an Azure SQL Database Managed Instance.
:param str command_type: Command type.
Expected value is 'Migrate.SqlServer.AzureDbSqlMi.Complete'.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param 'MigrateMISyncCompleteCommandOutputResponse' output: Command output. This is ignored if submitted.
:param str state: The state of the command. This is ignored if submitted.
:param 'MigrateMISyncCompleteCommandInputResponse' input: Command input
"""
pulumi.set(__self__, "command_type", 'Migrate.SqlServer.AzureDbSqlMi.Complete')
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter(name="commandType")
def command_type(self) -> str:
"""
Command type.
Expected value is 'Migrate.SqlServer.AzureDbSqlMi.Complete'.
"""
return pulumi.get(self, "command_type")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> 'outputs.MigrateMISyncCompleteCommandOutputResponse':
"""
Command output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the command. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter
def input(self) -> Optional['outputs.MigrateMISyncCompleteCommandInputResponse']:
"""
Command input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class MigrateMySqlAzureDbForMySqlSyncDatabaseInputResponse(dict):
"""
Database specific information for MySQL to Azure Database for MySQL migration task inputs
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "migrationSetting":
suggest = "migration_setting"
elif key == "sourceSetting":
suggest = "source_setting"
elif key == "targetDatabaseName":
suggest = "target_database_name"
elif key == "targetSetting":
suggest = "target_setting"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateMySqlAzureDbForMySqlSyncDatabaseInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateMySqlAzureDbForMySqlSyncDatabaseInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateMySqlAzureDbForMySqlSyncDatabaseInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
migration_setting: Optional[Mapping[str, str]] = None,
name: Optional[str] = None,
source_setting: Optional[Mapping[str, str]] = None,
target_database_name: Optional[str] = None,
target_setting: Optional[Mapping[str, str]] = None):
"""
Database specific information for MySQL to Azure Database for MySQL migration task inputs
:param Mapping[str, str] migration_setting: Migration settings which tune the migration behavior
:param str name: Name of the database
:param Mapping[str, str] source_setting: Source settings to tune source endpoint migration behavior
:param str target_database_name: Name of target database. Note: Target database will be truncated before starting migration.
:param Mapping[str, str] target_setting: Target settings to tune target endpoint migration behavior
"""
if migration_setting is not None:
pulumi.set(__self__, "migration_setting", migration_setting)
if name is not None:
pulumi.set(__self__, "name", name)
if source_setting is not None:
pulumi.set(__self__, "source_setting", source_setting)
if target_database_name is not None:
pulumi.set(__self__, "target_database_name", target_database_name)
if target_setting is not None:
pulumi.set(__self__, "target_setting", target_setting)
@property
@pulumi.getter(name="migrationSetting")
def migration_setting(self) -> Optional[Mapping[str, str]]:
"""
Migration settings which tune the migration behavior
"""
return pulumi.get(self, "migration_setting")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of the database
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="sourceSetting")
def source_setting(self) -> Optional[Mapping[str, str]]:
"""
Source settings to tune source endpoint migration behavior
"""
return pulumi.get(self, "source_setting")
@property
@pulumi.getter(name="targetDatabaseName")
def target_database_name(self) -> Optional[str]:
"""
Name of target database. Note: Target database will be truncated before starting migration.
"""
return pulumi.get(self, "target_database_name")
@property
@pulumi.getter(name="targetSetting")
def target_setting(self) -> Optional[Mapping[str, str]]:
"""
Target settings to tune target endpoint migration behavior
"""
return pulumi.get(self, "target_setting")
@pulumi.output_type
class MigrateMySqlAzureDbForMySqlSyncTaskInputResponse(dict):
"""
Input for the task that migrates MySQL databases to Azure Database for MySQL for online migrations
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "selectedDatabases":
suggest = "selected_databases"
elif key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateMySqlAzureDbForMySqlSyncTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
selected_databases: Sequence['outputs.MigrateMySqlAzureDbForMySqlSyncDatabaseInputResponse'],
source_connection_info: 'outputs.MySqlConnectionInfoResponse',
target_connection_info: 'outputs.MySqlConnectionInfoResponse'):
"""
Input for the task that migrates MySQL databases to Azure Database for MySQL for online migrations
:param Sequence['MigrateMySqlAzureDbForMySqlSyncDatabaseInputResponse'] selected_databases: Databases to migrate
:param 'MySqlConnectionInfoResponse' source_connection_info: Connection information for source MySQL
:param 'MySqlConnectionInfoResponse' target_connection_info: Connection information for target Azure Database for MySQL
"""
pulumi.set(__self__, "selected_databases", selected_databases)
pulumi.set(__self__, "source_connection_info", source_connection_info)
pulumi.set(__self__, "target_connection_info", target_connection_info)
@property
@pulumi.getter(name="selectedDatabases")
def selected_databases(self) -> Sequence['outputs.MigrateMySqlAzureDbForMySqlSyncDatabaseInputResponse']:
"""
Databases to migrate
"""
return pulumi.get(self, "selected_databases")
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.MySqlConnectionInfoResponse':
"""
Connection information for source MySQL
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.MySqlConnectionInfoResponse':
"""
Connection information for target Azure Database for MySQL
"""
return pulumi.get(self, "target_connection_info")
@pulumi.output_type
class MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseErrorResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "resultType":
suggest = "result_type"
elif key == "errorMessage":
suggest = "error_message"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseErrorResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseErrorResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseErrorResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
id: str,
result_type: str,
error_message: Optional[str] = None,
events: Optional[Sequence['outputs.SyncMigrationDatabaseErrorEventResponse']] = None):
"""
:param str id: Result identifier
:param str result_type: Result type
Expected value is 'DatabaseLevelErrorOutput'.
:param str error_message: Error message
:param Sequence['SyncMigrationDatabaseErrorEventResponse'] events: List of error events.
"""
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "result_type", 'DatabaseLevelErrorOutput')
if error_message is not None:
pulumi.set(__self__, "error_message", error_message)
if events is not None:
pulumi.set(__self__, "events", events)
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'DatabaseLevelErrorOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="errorMessage")
def error_message(self) -> Optional[str]:
"""
Error message
"""
return pulumi.get(self, "error_message")
@property
@pulumi.getter
def events(self) -> Optional[Sequence['outputs.SyncMigrationDatabaseErrorEventResponse']]:
"""
List of error events.
"""
return pulumi.get(self, "events")
@pulumi.output_type
class MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "appliedChanges":
suggest = "applied_changes"
elif key == "cdcDeleteCounter":
suggest = "cdc_delete_counter"
elif key == "cdcInsertCounter":
suggest = "cdc_insert_counter"
elif key == "cdcUpdateCounter":
suggest = "cdc_update_counter"
elif key == "databaseName":
suggest = "database_name"
elif key == "endedOn":
suggest = "ended_on"
elif key == "fullLoadCompletedTables":
suggest = "full_load_completed_tables"
elif key == "fullLoadErroredTables":
suggest = "full_load_errored_tables"
elif key == "fullLoadLoadingTables":
suggest = "full_load_loading_tables"
elif key == "fullLoadQueuedTables":
suggest = "full_load_queued_tables"
elif key == "incomingChanges":
suggest = "incoming_changes"
elif key == "initializationCompleted":
suggest = "initialization_completed"
elif key == "migrationState":
suggest = "migration_state"
elif key == "resultType":
suggest = "result_type"
elif key == "startedOn":
suggest = "started_on"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
applied_changes: float,
cdc_delete_counter: float,
cdc_insert_counter: float,
cdc_update_counter: float,
database_name: str,
ended_on: str,
full_load_completed_tables: float,
full_load_errored_tables: float,
full_load_loading_tables: float,
full_load_queued_tables: float,
id: str,
incoming_changes: float,
initialization_completed: bool,
latency: float,
migration_state: str,
result_type: str,
started_on: str):
"""
:param float applied_changes: Number of applied changes
:param float cdc_delete_counter: Number of cdc deletes
:param float cdc_insert_counter: Number of cdc inserts
:param float cdc_update_counter: Number of cdc updates
:param str database_name: Name of the database
:param str ended_on: Migration end time
:param float full_load_completed_tables: Number of tables completed in full load
:param float full_load_errored_tables: Number of tables errored in full load
:param float full_load_loading_tables: Number of tables loading in full load
:param float full_load_queued_tables: Number of tables queued in full load
:param str id: Result identifier
:param float incoming_changes: Number of incoming changes
:param bool initialization_completed: Indicates if initial load (full load) has been completed
:param float latency: CDC apply latency
:param str migration_state: Migration state that this database is in
:param str result_type: Result type
Expected value is 'DatabaseLevelOutput'.
:param str started_on: Migration start time
"""
pulumi.set(__self__, "applied_changes", applied_changes)
pulumi.set(__self__, "cdc_delete_counter", cdc_delete_counter)
pulumi.set(__self__, "cdc_insert_counter", cdc_insert_counter)
pulumi.set(__self__, "cdc_update_counter", cdc_update_counter)
pulumi.set(__self__, "database_name", database_name)
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "full_load_completed_tables", full_load_completed_tables)
pulumi.set(__self__, "full_load_errored_tables", full_load_errored_tables)
pulumi.set(__self__, "full_load_loading_tables", full_load_loading_tables)
pulumi.set(__self__, "full_load_queued_tables", full_load_queued_tables)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "incoming_changes", incoming_changes)
pulumi.set(__self__, "initialization_completed", initialization_completed)
pulumi.set(__self__, "latency", latency)
pulumi.set(__self__, "migration_state", migration_state)
pulumi.set(__self__, "result_type", 'DatabaseLevelOutput')
pulumi.set(__self__, "started_on", started_on)
@property
@pulumi.getter(name="appliedChanges")
def applied_changes(self) -> float:
"""
Number of applied changes
"""
return pulumi.get(self, "applied_changes")
@property
@pulumi.getter(name="cdcDeleteCounter")
def cdc_delete_counter(self) -> float:
"""
Number of cdc deletes
"""
return pulumi.get(self, "cdc_delete_counter")
@property
@pulumi.getter(name="cdcInsertCounter")
def cdc_insert_counter(self) -> float:
"""
Number of cdc inserts
"""
return pulumi.get(self, "cdc_insert_counter")
@property
@pulumi.getter(name="cdcUpdateCounter")
def cdc_update_counter(self) -> float:
"""
Number of cdc updates
"""
return pulumi.get(self, "cdc_update_counter")
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> str:
"""
Name of the database
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter(name="fullLoadCompletedTables")
def full_load_completed_tables(self) -> float:
"""
Number of tables completed in full load
"""
return pulumi.get(self, "full_load_completed_tables")
@property
@pulumi.getter(name="fullLoadErroredTables")
def full_load_errored_tables(self) -> float:
"""
Number of tables errored in full load
"""
return pulumi.get(self, "full_load_errored_tables")
@property
@pulumi.getter(name="fullLoadLoadingTables")
def full_load_loading_tables(self) -> float:
"""
Number of tables loading in full load
"""
return pulumi.get(self, "full_load_loading_tables")
@property
@pulumi.getter(name="fullLoadQueuedTables")
def full_load_queued_tables(self) -> float:
"""
Number of tables queued in full load
"""
return pulumi.get(self, "full_load_queued_tables")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="incomingChanges")
def incoming_changes(self) -> float:
"""
Number of incoming changes
"""
return pulumi.get(self, "incoming_changes")
@property
@pulumi.getter(name="initializationCompleted")
def initialization_completed(self) -> bool:
"""
Indicates if initial load (full load) has been completed
"""
return pulumi.get(self, "initialization_completed")
@property
@pulumi.getter
def latency(self) -> float:
"""
CDC apply latency
"""
return pulumi.get(self, "latency")
@property
@pulumi.getter(name="migrationState")
def migration_state(self) -> str:
"""
Migration state that this database is in
"""
return pulumi.get(self, "migration_state")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'DatabaseLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@pulumi.output_type
class MigrateMySqlAzureDbForMySqlSyncTaskOutputErrorResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "resultType":
suggest = "result_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateMySqlAzureDbForMySqlSyncTaskOutputErrorResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskOutputErrorResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskOutputErrorResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
error: 'outputs.ReportableExceptionResponse',
id: str,
result_type: str):
"""
:param 'ReportableExceptionResponse' error: Migration error
:param str id: Result identifier
:param str result_type: Result type
Expected value is 'ErrorOutput'.
"""
pulumi.set(__self__, "error", error)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "result_type", 'ErrorOutput')
@property
@pulumi.getter
def error(self) -> 'outputs.ReportableExceptionResponse':
"""
Migration error
"""
return pulumi.get(self, "error")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'ErrorOutput'.
"""
return pulumi.get(self, "result_type")
@pulumi.output_type
class MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "endedOn":
suggest = "ended_on"
elif key == "resultType":
suggest = "result_type"
elif key == "sourceServer":
suggest = "source_server"
elif key == "sourceServerVersion":
suggest = "source_server_version"
elif key == "startedOn":
suggest = "started_on"
elif key == "targetServer":
suggest = "target_server"
elif key == "targetServerVersion":
suggest = "target_server_version"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ended_on: str,
id: str,
result_type: str,
source_server: str,
source_server_version: str,
started_on: str,
target_server: str,
target_server_version: str):
"""
:param str ended_on: Migration end time
:param str id: Result identifier
:param str result_type: Result type
Expected value is 'MigrationLevelOutput'.
:param str source_server: Source server name
:param str source_server_version: Source server version
:param str started_on: Migration start time
:param str target_server: Target server name
:param str target_server_version: Target server version
"""
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "result_type", 'MigrationLevelOutput')
pulumi.set(__self__, "source_server", source_server)
pulumi.set(__self__, "source_server_version", source_server_version)
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "target_server", target_server)
pulumi.set(__self__, "target_server_version", target_server_version)
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'MigrationLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="sourceServer")
def source_server(self) -> str:
"""
Source server name
"""
return pulumi.get(self, "source_server")
@property
@pulumi.getter(name="sourceServerVersion")
def source_server_version(self) -> str:
"""
Source server version
"""
return pulumi.get(self, "source_server_version")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter(name="targetServer")
def target_server(self) -> str:
"""
Target server name
"""
return pulumi.get(self, "target_server")
@property
@pulumi.getter(name="targetServerVersion")
def target_server_version(self) -> str:
"""
Target server version
"""
return pulumi.get(self, "target_server_version")
@pulumi.output_type
class MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "cdcDeleteCounter":
suggest = "cdc_delete_counter"
elif key == "cdcInsertCounter":
suggest = "cdc_insert_counter"
elif key == "cdcUpdateCounter":
suggest = "cdc_update_counter"
elif key == "dataErrorsCounter":
suggest = "data_errors_counter"
elif key == "databaseName":
suggest = "database_name"
elif key == "fullLoadEndedOn":
suggest = "full_load_ended_on"
elif key == "fullLoadEstFinishTime":
suggest = "full_load_est_finish_time"
elif key == "fullLoadStartedOn":
suggest = "full_load_started_on"
elif key == "fullLoadTotalRows":
suggest = "full_load_total_rows"
elif key == "lastModifiedTime":
suggest = "last_modified_time"
elif key == "resultType":
suggest = "result_type"
elif key == "tableName":
suggest = "table_name"
elif key == "totalChangesApplied":
suggest = "total_changes_applied"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cdc_delete_counter: str,
cdc_insert_counter: str,
cdc_update_counter: str,
data_errors_counter: float,
database_name: str,
full_load_ended_on: str,
full_load_est_finish_time: str,
full_load_started_on: str,
full_load_total_rows: float,
id: str,
last_modified_time: str,
result_type: str,
state: str,
table_name: str,
total_changes_applied: float):
"""
:param str cdc_delete_counter: Number of applied deletes
:param str cdc_insert_counter: Number of applied inserts
:param str cdc_update_counter: Number of applied updates
        :param float data_errors_counter: Number of data errors that occurred
:param str database_name: Name of the database
:param str full_load_ended_on: Full load end time
        :param str full_load_est_finish_time: Estimated time to finish the full load
:param str full_load_started_on: Full load start time
:param float full_load_total_rows: Number of rows applied in full load
:param str id: Result identifier
:param str last_modified_time: Last modified time on target
:param str result_type: Result type
Expected value is 'TableLevelOutput'.
:param str state: Current state of the table migration
:param str table_name: Name of the table
:param float total_changes_applied: Total number of applied changes
"""
pulumi.set(__self__, "cdc_delete_counter", cdc_delete_counter)
pulumi.set(__self__, "cdc_insert_counter", cdc_insert_counter)
pulumi.set(__self__, "cdc_update_counter", cdc_update_counter)
pulumi.set(__self__, "data_errors_counter", data_errors_counter)
pulumi.set(__self__, "database_name", database_name)
pulumi.set(__self__, "full_load_ended_on", full_load_ended_on)
pulumi.set(__self__, "full_load_est_finish_time", full_load_est_finish_time)
pulumi.set(__self__, "full_load_started_on", full_load_started_on)
pulumi.set(__self__, "full_load_total_rows", full_load_total_rows)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "last_modified_time", last_modified_time)
pulumi.set(__self__, "result_type", 'TableLevelOutput')
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "table_name", table_name)
pulumi.set(__self__, "total_changes_applied", total_changes_applied)
@property
@pulumi.getter(name="cdcDeleteCounter")
def cdc_delete_counter(self) -> str:
"""
Number of applied deletes
"""
return pulumi.get(self, "cdc_delete_counter")
@property
@pulumi.getter(name="cdcInsertCounter")
def cdc_insert_counter(self) -> str:
"""
Number of applied inserts
"""
return pulumi.get(self, "cdc_insert_counter")
@property
@pulumi.getter(name="cdcUpdateCounter")
def cdc_update_counter(self) -> str:
"""
Number of applied updates
"""
return pulumi.get(self, "cdc_update_counter")
@property
@pulumi.getter(name="dataErrorsCounter")
def data_errors_counter(self) -> float:
"""
        Number of data errors that occurred
"""
return pulumi.get(self, "data_errors_counter")
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> str:
"""
Name of the database
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter(name="fullLoadEndedOn")
def full_load_ended_on(self) -> str:
"""
Full load end time
"""
return pulumi.get(self, "full_load_ended_on")
@property
@pulumi.getter(name="fullLoadEstFinishTime")
def full_load_est_finish_time(self) -> str:
"""
        Estimated time to finish the full load
"""
return pulumi.get(self, "full_load_est_finish_time")
@property
@pulumi.getter(name="fullLoadStartedOn")
def full_load_started_on(self) -> str:
"""
Full load start time
"""
return pulumi.get(self, "full_load_started_on")
@property
@pulumi.getter(name="fullLoadTotalRows")
def full_load_total_rows(self) -> float:
"""
Number of rows applied in full load
"""
return pulumi.get(self, "full_load_total_rows")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="lastModifiedTime")
def last_modified_time(self) -> str:
"""
Last modified time on target
"""
return pulumi.get(self, "last_modified_time")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'TableLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter
def state(self) -> str:
"""
Current state of the table migration
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="tableName")
def table_name(self) -> str:
"""
Name of the table
"""
return pulumi.get(self, "table_name")
@property
@pulumi.getter(name="totalChangesApplied")
def total_changes_applied(self) -> float:
"""
Total number of applied changes
"""
return pulumi.get(self, "total_changes_applied")
@pulumi.output_type
class MigrateMySqlAzureDbForMySqlSyncTaskPropertiesResponse(dict):
"""
Properties for the task that migrates MySQL databases to Azure Database for MySQL for online migrations
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateMySqlAzureDbForMySqlSyncTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateMySqlAzureDbForMySqlSyncTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence[Any],
state: str,
task_type: str,
input: Optional['outputs.MigrateMySqlAzureDbForMySqlSyncTaskInputResponse'] = None):
"""
Properties for the task that migrates MySQL databases to Azure Database for MySQL for online migrations
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence[Union['MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseErrorResponse', 'MigrateMySqlAzureDbForMySqlSyncTaskOutputDatabaseLevelResponse', 'MigrateMySqlAzureDbForMySqlSyncTaskOutputErrorResponse', 'MigrateMySqlAzureDbForMySqlSyncTaskOutputMigrationLevelResponse', 'MigrateMySqlAzureDbForMySqlSyncTaskOutputTableLevelResponse']] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'Migrate.MySql.AzureDbForMySql.Sync'.
:param 'MigrateMySqlAzureDbForMySqlSyncTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'Migrate.MySql.AzureDbForMySql.Sync')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence[Any]:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'Migrate.MySql.AzureDbForMySql.Sync'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.MigrateMySqlAzureDbForMySqlSyncTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInputResponse(dict):
"""
Database specific information for PostgreSQL to Azure Database for PostgreSQL migration task inputs
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "migrationSetting":
suggest = "migration_setting"
elif key == "sourceSetting":
suggest = "source_setting"
elif key == "targetDatabaseName":
suggest = "target_database_name"
elif key == "targetSetting":
suggest = "target_setting"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
migration_setting: Optional[Mapping[str, str]] = None,
name: Optional[str] = None,
source_setting: Optional[Mapping[str, str]] = None,
target_database_name: Optional[str] = None,
target_setting: Optional[Mapping[str, str]] = None):
"""
        Database-specific information for PostgreSQL to Azure Database for PostgreSQL migration task inputs
:param Mapping[str, str] migration_setting: Migration settings which tune the migration behavior
:param str name: Name of the database
:param Mapping[str, str] source_setting: Source settings to tune source endpoint migration behavior
:param str target_database_name: Name of target database. Note: Target database will be truncated before starting migration.
:param Mapping[str, str] target_setting: Target settings to tune target endpoint migration behavior
"""
if migration_setting is not None:
pulumi.set(__self__, "migration_setting", migration_setting)
if name is not None:
pulumi.set(__self__, "name", name)
if source_setting is not None:
pulumi.set(__self__, "source_setting", source_setting)
if target_database_name is not None:
pulumi.set(__self__, "target_database_name", target_database_name)
if target_setting is not None:
pulumi.set(__self__, "target_setting", target_setting)
@property
@pulumi.getter(name="migrationSetting")
def migration_setting(self) -> Optional[Mapping[str, str]]:
"""
Migration settings which tune the migration behavior
"""
return pulumi.get(self, "migration_setting")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of the database
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="sourceSetting")
def source_setting(self) -> Optional[Mapping[str, str]]:
"""
Source settings to tune source endpoint migration behavior
"""
return pulumi.get(self, "source_setting")
@property
@pulumi.getter(name="targetDatabaseName")
def target_database_name(self) -> Optional[str]:
"""
Name of target database. Note: Target database will be truncated before starting migration.
"""
return pulumi.get(self, "target_database_name")
@property
@pulumi.getter(name="targetSetting")
def target_setting(self) -> Optional[Mapping[str, str]]:
"""
Target settings to tune target endpoint migration behavior
"""
return pulumi.get(self, "target_setting")
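

# --- Illustrative sketch (not part of the generated SDK) --------------------
# Every field on the per-database input above is optional; a hypothetical
# consumer can fall back to the source database name when no explicit target
# name was configured:
def _example_effective_target_name(db: 'MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInputResponse') -> Optional[str]:
    """Sketch only: resolve the effective target database name."""
    return db.target_database_name or db.name
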
@pulumi.output_type
class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInputResponse(dict):
"""
Input for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for online migrations
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "selectedDatabases":
suggest = "selected_databases"
elif key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
selected_databases: Sequence['outputs.MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInputResponse'],
source_connection_info: 'outputs.PostgreSqlConnectionInfoResponse',
target_connection_info: 'outputs.PostgreSqlConnectionInfoResponse'):
"""
Input for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for online migrations
:param Sequence['MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInputResponse'] selected_databases: Databases to migrate
:param 'PostgreSqlConnectionInfoResponse' source_connection_info: Connection information for source PostgreSQL
:param 'PostgreSqlConnectionInfoResponse' target_connection_info: Connection information for target Azure Database for PostgreSQL
"""
pulumi.set(__self__, "selected_databases", selected_databases)
pulumi.set(__self__, "source_connection_info", source_connection_info)
pulumi.set(__self__, "target_connection_info", target_connection_info)
@property
@pulumi.getter(name="selectedDatabases")
def selected_databases(self) -> Sequence['outputs.MigratePostgreSqlAzureDbForPostgreSqlSyncDatabaseInputResponse']:
"""
Databases to migrate
"""
return pulumi.get(self, "selected_databases")
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.PostgreSqlConnectionInfoResponse':
"""
Connection information for source PostgreSQL
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.PostgreSqlConnectionInfoResponse':
"""
Connection information for target Azure Database for PostgreSQL
"""
return pulumi.get(self, "target_connection_info")
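

# --- Illustrative sketch (not part of the generated SDK) --------------------
# The task input bundles the selected databases with source and target
# connection details. A hypothetical helper that lists the (optional) names
# of the databases selected for migration:
def _example_selected_database_names(task_input: 'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInputResponse') -> Sequence[Optional[str]]:
    """Sketch only: collect the names of the selected databases."""
    return [db.name for db in task_input.selected_databases]
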
@pulumi.output_type
class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseErrorResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "resultType":
suggest = "result_type"
elif key == "errorMessage":
suggest = "error_message"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseErrorResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseErrorResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseErrorResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
id: str,
result_type: str,
error_message: Optional[str] = None,
events: Optional[Sequence['outputs.SyncMigrationDatabaseErrorEventResponse']] = None):
"""
:param str id: Result identifier
:param str result_type: Result type
Expected value is 'DatabaseLevelErrorOutput'.
:param str error_message: Error message
:param Sequence['SyncMigrationDatabaseErrorEventResponse'] events: List of error events.
"""
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "result_type", 'DatabaseLevelErrorOutput')
if error_message is not None:
pulumi.set(__self__, "error_message", error_message)
if events is not None:
pulumi.set(__self__, "events", events)
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'DatabaseLevelErrorOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="errorMessage")
def error_message(self) -> Optional[str]:
"""
Error message
"""
return pulumi.get(self, "error_message")
@property
@pulumi.getter
def events(self) -> Optional[Sequence['outputs.SyncMigrationDatabaseErrorEventResponse']]:
"""
List of error events.
"""
return pulumi.get(self, "events")
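

# --- Illustrative sketch (not part of the generated SDK) --------------------
# Database-level error outputs carry an optional message plus a list of error
# events. A hypothetical one-line summary, using only the fields defined above:
def _example_error_summary(err: 'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseErrorResponse') -> str:
    """Sketch only: human-readable summary of a database-level error output."""
    return f"{err.id}: {err.error_message or 'unknown error'}"
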
@pulumi.output_type
class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "appliedChanges":
suggest = "applied_changes"
elif key == "cdcDeleteCounter":
suggest = "cdc_delete_counter"
elif key == "cdcInsertCounter":
suggest = "cdc_insert_counter"
elif key == "cdcUpdateCounter":
suggest = "cdc_update_counter"
elif key == "databaseName":
suggest = "database_name"
elif key == "endedOn":
suggest = "ended_on"
elif key == "fullLoadCompletedTables":
suggest = "full_load_completed_tables"
elif key == "fullLoadErroredTables":
suggest = "full_load_errored_tables"
elif key == "fullLoadLoadingTables":
suggest = "full_load_loading_tables"
elif key == "fullLoadQueuedTables":
suggest = "full_load_queued_tables"
elif key == "incomingChanges":
suggest = "incoming_changes"
elif key == "initializationCompleted":
suggest = "initialization_completed"
elif key == "migrationState":
suggest = "migration_state"
elif key == "resultType":
suggest = "result_type"
elif key == "startedOn":
suggest = "started_on"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
applied_changes: float,
cdc_delete_counter: float,
cdc_insert_counter: float,
cdc_update_counter: float,
database_name: str,
ended_on: str,
full_load_completed_tables: float,
full_load_errored_tables: float,
full_load_loading_tables: float,
full_load_queued_tables: float,
id: str,
incoming_changes: float,
initialization_completed: bool,
latency: float,
migration_state: str,
result_type: str,
started_on: str):
"""
:param float applied_changes: Number of applied changes
        :param float cdc_delete_counter: Number of CDC deletes
        :param float cdc_insert_counter: Number of CDC inserts
        :param float cdc_update_counter: Number of CDC updates
:param str database_name: Name of the database
:param str ended_on: Migration end time
:param float full_load_completed_tables: Number of tables completed in full load
:param float full_load_errored_tables: Number of tables errored in full load
:param float full_load_loading_tables: Number of tables loading in full load
:param float full_load_queued_tables: Number of tables queued in full load
:param str id: Result identifier
:param float incoming_changes: Number of incoming changes
:param bool initialization_completed: Indicates if initial load (full load) has been completed
:param float latency: CDC apply latency
:param str migration_state: Migration state that this database is in
:param str result_type: Result type
Expected value is 'DatabaseLevelOutput'.
:param str started_on: Migration start time
"""
pulumi.set(__self__, "applied_changes", applied_changes)
pulumi.set(__self__, "cdc_delete_counter", cdc_delete_counter)
pulumi.set(__self__, "cdc_insert_counter", cdc_insert_counter)
pulumi.set(__self__, "cdc_update_counter", cdc_update_counter)
pulumi.set(__self__, "database_name", database_name)
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "full_load_completed_tables", full_load_completed_tables)
pulumi.set(__self__, "full_load_errored_tables", full_load_errored_tables)
pulumi.set(__self__, "full_load_loading_tables", full_load_loading_tables)
pulumi.set(__self__, "full_load_queued_tables", full_load_queued_tables)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "incoming_changes", incoming_changes)
pulumi.set(__self__, "initialization_completed", initialization_completed)
pulumi.set(__self__, "latency", latency)
pulumi.set(__self__, "migration_state", migration_state)
pulumi.set(__self__, "result_type", 'DatabaseLevelOutput')
pulumi.set(__self__, "started_on", started_on)
@property
@pulumi.getter(name="appliedChanges")
def applied_changes(self) -> float:
"""
Number of applied changes
"""
return pulumi.get(self, "applied_changes")
@property
@pulumi.getter(name="cdcDeleteCounter")
def cdc_delete_counter(self) -> float:
"""
        Number of CDC deletes
"""
return pulumi.get(self, "cdc_delete_counter")
@property
@pulumi.getter(name="cdcInsertCounter")
def cdc_insert_counter(self) -> float:
"""
        Number of CDC inserts
"""
return pulumi.get(self, "cdc_insert_counter")
@property
@pulumi.getter(name="cdcUpdateCounter")
def cdc_update_counter(self) -> float:
"""
        Number of CDC updates
"""
return pulumi.get(self, "cdc_update_counter")
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> str:
"""
Name of the database
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter(name="fullLoadCompletedTables")
def full_load_completed_tables(self) -> float:
"""
Number of tables completed in full load
"""
return pulumi.get(self, "full_load_completed_tables")
@property
@pulumi.getter(name="fullLoadErroredTables")
def full_load_errored_tables(self) -> float:
"""
Number of tables errored in full load
"""
return pulumi.get(self, "full_load_errored_tables")
@property
@pulumi.getter(name="fullLoadLoadingTables")
def full_load_loading_tables(self) -> float:
"""
Number of tables loading in full load
"""
return pulumi.get(self, "full_load_loading_tables")
@property
@pulumi.getter(name="fullLoadQueuedTables")
def full_load_queued_tables(self) -> float:
"""
Number of tables queued in full load
"""
return pulumi.get(self, "full_load_queued_tables")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="incomingChanges")
def incoming_changes(self) -> float:
"""
Number of incoming changes
"""
return pulumi.get(self, "incoming_changes")
@property
@pulumi.getter(name="initializationCompleted")
def initialization_completed(self) -> bool:
"""
Indicates if initial load (full load) has been completed
"""
return pulumi.get(self, "initialization_completed")
@property
@pulumi.getter
def latency(self) -> float:
"""
CDC apply latency
"""
return pulumi.get(self, "latency")
@property
@pulumi.getter(name="migrationState")
def migration_state(self) -> str:
"""
Migration state that this database is in
"""
return pulumi.get(self, "migration_state")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'DatabaseLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
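

# --- Illustrative sketch (not part of the generated SDK) --------------------
# Assuming the four full-load counters above partition a database's tables by
# state (as their descriptions suggest), their sum approximates the table
# total and yields a simple progress ratio:
def _example_full_load_progress(db_level: 'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevelResponse') -> float:
    """Sketch only: fraction of tables that have completed the initial full load."""
    total = (db_level.full_load_completed_tables
             + db_level.full_load_errored_tables
             + db_level.full_load_loading_tables
             + db_level.full_load_queued_tables)
    return db_level.full_load_completed_tables / total if total else 0.0
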
@pulumi.output_type
class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputErrorResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "resultType":
suggest = "result_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputErrorResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputErrorResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputErrorResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
error: 'outputs.ReportableExceptionResponse',
id: str,
result_type: str):
"""
:param 'ReportableExceptionResponse' error: Migration error
:param str id: Result identifier
:param str result_type: Result type
Expected value is 'ErrorOutput'.
"""
pulumi.set(__self__, "error", error)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "result_type", 'ErrorOutput')
@property
@pulumi.getter
def error(self) -> 'outputs.ReportableExceptionResponse':
"""
Migration error
"""
return pulumi.get(self, "error")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'ErrorOutput'.
"""
return pulumi.get(self, "result_type")
@pulumi.output_type
class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "endedOn":
suggest = "ended_on"
elif key == "resultType":
suggest = "result_type"
elif key == "sourceServer":
suggest = "source_server"
elif key == "sourceServerVersion":
suggest = "source_server_version"
elif key == "startedOn":
suggest = "started_on"
elif key == "targetServer":
suggest = "target_server"
elif key == "targetServerVersion":
suggest = "target_server_version"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ended_on: str,
id: str,
result_type: str,
source_server: str,
source_server_version: str,
started_on: str,
target_server: str,
target_server_version: str):
"""
:param str ended_on: Migration end time
:param str id: Result identifier
:param str result_type: Result type
Expected value is 'MigrationLevelOutput'.
:param str source_server: Source server name
:param str source_server_version: Source server version
:param str started_on: Migration start time
:param str target_server: Target server name
:param str target_server_version: Target server version
"""
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "result_type", 'MigrationLevelOutput')
pulumi.set(__self__, "source_server", source_server)
pulumi.set(__self__, "source_server_version", source_server_version)
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "target_server", target_server)
pulumi.set(__self__, "target_server_version", target_server_version)
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'MigrationLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="sourceServer")
def source_server(self) -> str:
"""
Source server name
"""
return pulumi.get(self, "source_server")
@property
@pulumi.getter(name="sourceServerVersion")
def source_server_version(self) -> str:
"""
Source server version
"""
return pulumi.get(self, "source_server_version")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter(name="targetServer")
def target_server(self) -> str:
"""
Target server name
"""
return pulumi.get(self, "target_server")
@property
@pulumi.getter(name="targetServerVersion")
def target_server_version(self) -> str:
"""
Target server version
"""
return pulumi.get(self, "target_server_version")
@pulumi.output_type
class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "cdcDeleteCounter":
suggest = "cdc_delete_counter"
elif key == "cdcInsertCounter":
suggest = "cdc_insert_counter"
elif key == "cdcUpdateCounter":
suggest = "cdc_update_counter"
elif key == "dataErrorsCounter":
suggest = "data_errors_counter"
elif key == "databaseName":
suggest = "database_name"
elif key == "fullLoadEndedOn":
suggest = "full_load_ended_on"
elif key == "fullLoadEstFinishTime":
suggest = "full_load_est_finish_time"
elif key == "fullLoadStartedOn":
suggest = "full_load_started_on"
elif key == "fullLoadTotalRows":
suggest = "full_load_total_rows"
elif key == "lastModifiedTime":
suggest = "last_modified_time"
elif key == "resultType":
suggest = "result_type"
elif key == "tableName":
suggest = "table_name"
elif key == "totalChangesApplied":
suggest = "total_changes_applied"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cdc_delete_counter: float,
cdc_insert_counter: float,
cdc_update_counter: float,
data_errors_counter: float,
database_name: str,
full_load_ended_on: str,
full_load_est_finish_time: str,
full_load_started_on: str,
full_load_total_rows: float,
id: str,
last_modified_time: str,
result_type: str,
state: str,
table_name: str,
total_changes_applied: float):
"""
:param float cdc_delete_counter: Number of applied deletes
:param float cdc_insert_counter: Number of applied inserts
:param float cdc_update_counter: Number of applied updates
        :param float data_errors_counter: Number of data errors that occurred
:param str database_name: Name of the database
:param str full_load_ended_on: Full load end time
        :param str full_load_est_finish_time: Estimated time to finish full load
:param str full_load_started_on: Full load start time
:param float full_load_total_rows: Number of rows applied in full load
:param str id: Result identifier
:param str last_modified_time: Last modified time on target
:param str result_type: Result type
Expected value is 'TableLevelOutput'.
:param str state: Current state of the table migration
:param str table_name: Name of the table
:param float total_changes_applied: Total number of applied changes
"""
pulumi.set(__self__, "cdc_delete_counter", cdc_delete_counter)
pulumi.set(__self__, "cdc_insert_counter", cdc_insert_counter)
pulumi.set(__self__, "cdc_update_counter", cdc_update_counter)
pulumi.set(__self__, "data_errors_counter", data_errors_counter)
pulumi.set(__self__, "database_name", database_name)
pulumi.set(__self__, "full_load_ended_on", full_load_ended_on)
pulumi.set(__self__, "full_load_est_finish_time", full_load_est_finish_time)
pulumi.set(__self__, "full_load_started_on", full_load_started_on)
pulumi.set(__self__, "full_load_total_rows", full_load_total_rows)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "last_modified_time", last_modified_time)
pulumi.set(__self__, "result_type", 'TableLevelOutput')
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "table_name", table_name)
pulumi.set(__self__, "total_changes_applied", total_changes_applied)
@property
@pulumi.getter(name="cdcDeleteCounter")
def cdc_delete_counter(self) -> float:
"""
Number of applied deletes
"""
return pulumi.get(self, "cdc_delete_counter")
@property
@pulumi.getter(name="cdcInsertCounter")
def cdc_insert_counter(self) -> float:
"""
Number of applied inserts
"""
return pulumi.get(self, "cdc_insert_counter")
@property
@pulumi.getter(name="cdcUpdateCounter")
def cdc_update_counter(self) -> float:
"""
Number of applied updates
"""
return pulumi.get(self, "cdc_update_counter")
@property
@pulumi.getter(name="dataErrorsCounter")
def data_errors_counter(self) -> float:
"""
        Number of data errors that occurred
"""
return pulumi.get(self, "data_errors_counter")
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> str:
"""
Name of the database
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter(name="fullLoadEndedOn")
def full_load_ended_on(self) -> str:
"""
Full load end time
"""
return pulumi.get(self, "full_load_ended_on")
@property
@pulumi.getter(name="fullLoadEstFinishTime")
def full_load_est_finish_time(self) -> str:
"""
        Estimated time to finish full load
"""
return pulumi.get(self, "full_load_est_finish_time")
@property
@pulumi.getter(name="fullLoadStartedOn")
def full_load_started_on(self) -> str:
"""
Full load start time
"""
return pulumi.get(self, "full_load_started_on")
@property
@pulumi.getter(name="fullLoadTotalRows")
def full_load_total_rows(self) -> float:
"""
Number of rows applied in full load
"""
return pulumi.get(self, "full_load_total_rows")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="lastModifiedTime")
def last_modified_time(self) -> str:
"""
Last modified time on target
"""
return pulumi.get(self, "last_modified_time")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'TableLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter
def state(self) -> str:
"""
Current state of the table migration
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="tableName")
def table_name(self) -> str:
"""
Name of the table
"""
return pulumi.get(self, "table_name")
@property
@pulumi.getter(name="totalChangesApplied")
def total_changes_applied(self) -> float:
"""
Total number of applied changes
"""
return pulumi.get(self, "total_changes_applied")
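

# --- Illustrative sketch (not part of the generated SDK) --------------------
# Assuming the per-operation CDC counters above are disjoint tallies, their
# sum gives the continuous-sync portion of the changes applied to a table:
def _example_table_cdc_changes(tbl: 'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevelResponse') -> float:
    """Sketch only: total CDC changes applied to a single table."""
    return tbl.cdc_insert_counter + tbl.cdc_update_counter + tbl.cdc_delete_counter
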
@pulumi.output_type
class MigratePostgreSqlAzureDbForPostgreSqlSyncTaskPropertiesResponse(dict):
"""
Properties for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for online migrations
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigratePostgreSqlAzureDbForPostgreSqlSyncTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigratePostgreSqlAzureDbForPostgreSqlSyncTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence[Any],
state: str,
task_type: str,
input: Optional['outputs.MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInputResponse'] = None):
"""
Properties for the task that migrates PostgreSQL databases to Azure Database for PostgreSQL for online migrations
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence[Union['MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseErrorResponse', 'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputDatabaseLevelResponse', 'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputErrorResponse', 'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputMigrationLevelResponse', 'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskOutputTableLevelResponse']] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'Migrate.PostgreSql.AzureDbForPostgreSql.Sync'.
:param 'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'Migrate.PostgreSql.AzureDbForPostgreSql.Sync')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence[Any]:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'Migrate.PostgreSql.AzureDbForPostgreSql.Sync'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.MigratePostgreSqlAzureDbForPostgreSqlSyncTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
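

# --- Illustrative sketch (not part of the generated SDK) --------------------
# `output` is a heterogeneous sequence whose elements are discriminated by a
# `result_type` field ('MigrationLevelOutput', 'DatabaseLevelOutput',
# 'TableLevelOutput', 'DatabaseLevelErrorOutput' or 'ErrorOutput'). A
# hypothetical filter for the database-level progress records:
def _example_database_level_outputs(props: 'MigratePostgreSqlAzureDbForPostgreSqlSyncTaskPropertiesResponse') -> Sequence[Any]:
    """Sketch only: keep only database-level entries from the task output."""
    return [item for item in props.output
            if getattr(item, 'result_type', None) == 'DatabaseLevelOutput']
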
@pulumi.output_type
class MigrateSqlServerSqlDbDatabaseInputResponse(dict):
"""
    Database-specific information for SQL to Azure SQL DB migration task inputs
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "makeSourceDbReadOnly":
suggest = "make_source_db_read_only"
elif key == "tableMap":
suggest = "table_map"
elif key == "targetDatabaseName":
suggest = "target_database_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbDatabaseInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbDatabaseInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbDatabaseInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
make_source_db_read_only: Optional[bool] = None,
name: Optional[str] = None,
table_map: Optional[Mapping[str, str]] = None,
target_database_name: Optional[str] = None):
"""
        Database-specific information for SQL to Azure SQL DB migration task inputs
        :param bool make_source_db_read_only: Whether to set the database to read-only before migration
:param str name: Name of the database
:param Mapping[str, str] table_map: Mapping of source to target tables
:param str target_database_name: Name of target database. Note: Target database will be truncated before starting migration.
"""
if make_source_db_read_only is not None:
pulumi.set(__self__, "make_source_db_read_only", make_source_db_read_only)
if name is not None:
pulumi.set(__self__, "name", name)
if table_map is not None:
pulumi.set(__self__, "table_map", table_map)
if target_database_name is not None:
pulumi.set(__self__, "target_database_name", target_database_name)
@property
@pulumi.getter(name="makeSourceDbReadOnly")
def make_source_db_read_only(self) -> Optional[bool]:
"""
        Whether to set the database to read-only before migration
"""
return pulumi.get(self, "make_source_db_read_only")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of the database
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="tableMap")
def table_map(self) -> Optional[Mapping[str, str]]:
"""
Mapping of source to target tables
"""
return pulumi.get(self, "table_map")
@property
@pulumi.getter(name="targetDatabaseName")
def target_database_name(self) -> Optional[str]:
"""
Name of target database. Note: Target database will be truncated before starting migration.
"""
return pulumi.get(self, "target_database_name")
@pulumi.output_type
class MigrateSqlServerSqlDbSyncDatabaseInputResponse(dict):
"""
    Database-specific information for SQL to Azure SQL DB sync migration task inputs
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "migrationSetting":
suggest = "migration_setting"
elif key == "schemaName":
suggest = "schema_name"
elif key == "sourceSetting":
suggest = "source_setting"
elif key == "tableMap":
suggest = "table_map"
elif key == "targetDatabaseName":
suggest = "target_database_name"
elif key == "targetSetting":
suggest = "target_setting"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbSyncDatabaseInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbSyncDatabaseInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbSyncDatabaseInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
id: Optional[str] = None,
migration_setting: Optional[Mapping[str, str]] = None,
name: Optional[str] = None,
schema_name: Optional[str] = None,
source_setting: Optional[Mapping[str, str]] = None,
table_map: Optional[Mapping[str, str]] = None,
target_database_name: Optional[str] = None,
target_setting: Optional[Mapping[str, str]] = None):
"""
        Database-specific information for SQL to Azure SQL DB sync migration task inputs
:param str id: Unique identifier for database
:param Mapping[str, str] migration_setting: Migration settings which tune the migration behavior
:param str name: Name of database
:param str schema_name: Schema name to be migrated
:param Mapping[str, str] source_setting: Source settings to tune source endpoint migration behavior
:param Mapping[str, str] table_map: Mapping of source to target tables
:param str target_database_name: Target database name
:param Mapping[str, str] target_setting: Target settings to tune target endpoint migration behavior
"""
if id is not None:
pulumi.set(__self__, "id", id)
if migration_setting is not None:
pulumi.set(__self__, "migration_setting", migration_setting)
if name is not None:
pulumi.set(__self__, "name", name)
if schema_name is not None:
pulumi.set(__self__, "schema_name", schema_name)
if source_setting is not None:
pulumi.set(__self__, "source_setting", source_setting)
if table_map is not None:
pulumi.set(__self__, "table_map", table_map)
if target_database_name is not None:
pulumi.set(__self__, "target_database_name", target_database_name)
if target_setting is not None:
pulumi.set(__self__, "target_setting", target_setting)
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Unique identifier for database
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="migrationSetting")
def migration_setting(self) -> Optional[Mapping[str, str]]:
"""
Migration settings which tune the migration behavior
"""
return pulumi.get(self, "migration_setting")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of database
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="schemaName")
def schema_name(self) -> Optional[str]:
"""
Schema name to be migrated
"""
return pulumi.get(self, "schema_name")
@property
@pulumi.getter(name="sourceSetting")
def source_setting(self) -> Optional[Mapping[str, str]]:
"""
Source settings to tune source endpoint migration behavior
"""
return pulumi.get(self, "source_setting")
@property
@pulumi.getter(name="tableMap")
def table_map(self) -> Optional[Mapping[str, str]]:
"""
Mapping of source to target tables
"""
return pulumi.get(self, "table_map")
@property
@pulumi.getter(name="targetDatabaseName")
def target_database_name(self) -> Optional[str]:
"""
Target database name
"""
return pulumi.get(self, "target_database_name")
@property
@pulumi.getter(name="targetSetting")
def target_setting(self) -> Optional[Mapping[str, str]]:
"""
Target settings to tune target endpoint migration behavior
"""
return pulumi.get(self, "target_setting")
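

# --- Illustrative sketch (not part of the generated SDK) --------------------
# `table_map` above maps source tables to target tables. A hypothetical
# inverted lookup (target -> source), tolerating the optional mapping:
def _example_invert_table_map(db: 'MigrateSqlServerSqlDbSyncDatabaseInputResponse') -> Mapping[str, str]:
    """Sketch only: build a target-to-source lookup from the table map."""
    return {target: source for source, target in (db.table_map or {}).items()}
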
@pulumi.output_type
class MigrateSqlServerSqlDbSyncTaskInputResponse(dict):
"""
Input for the task that migrates on-prem SQL Server databases to Azure SQL Database for online migrations
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "selectedDatabases":
suggest = "selected_databases"
elif key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
elif key == "validationOptions":
suggest = "validation_options"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbSyncTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbSyncTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbSyncTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
selected_databases: Sequence['outputs.MigrateSqlServerSqlDbSyncDatabaseInputResponse'],
source_connection_info: 'outputs.SqlConnectionInfoResponse',
target_connection_info: 'outputs.SqlConnectionInfoResponse',
validation_options: Optional['outputs.MigrationValidationOptionsResponse'] = None):
"""
Input for the task that migrates on-prem SQL Server databases to Azure SQL Database for online migrations
:param Sequence['MigrateSqlServerSqlDbSyncDatabaseInputResponse'] selected_databases: Databases to migrate
:param 'SqlConnectionInfoResponse' source_connection_info: Information for connecting to source
:param 'SqlConnectionInfoResponse' target_connection_info: Information for connecting to target
:param 'MigrationValidationOptionsResponse' validation_options: Validation options
"""
pulumi.set(__self__, "selected_databases", selected_databases)
pulumi.set(__self__, "source_connection_info", source_connection_info)
pulumi.set(__self__, "target_connection_info", target_connection_info)
if validation_options is not None:
pulumi.set(__self__, "validation_options", validation_options)
@property
@pulumi.getter(name="selectedDatabases")
def selected_databases(self) -> Sequence['outputs.MigrateSqlServerSqlDbSyncDatabaseInputResponse']:
"""
Databases to migrate
"""
return pulumi.get(self, "selected_databases")
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Information for connecting to source
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Information for connecting to target
"""
return pulumi.get(self, "target_connection_info")
@property
@pulumi.getter(name="validationOptions")
def validation_options(self) -> Optional['outputs.MigrationValidationOptionsResponse']:
"""
Validation options
"""
return pulumi.get(self, "validation_options")
@pulumi.output_type
class MigrateSqlServerSqlDbSyncTaskOutputDatabaseErrorResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "resultType":
suggest = "result_type"
elif key == "errorMessage":
suggest = "error_message"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbSyncTaskOutputDatabaseErrorResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbSyncTaskOutputDatabaseErrorResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbSyncTaskOutputDatabaseErrorResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
id: str,
result_type: str,
error_message: Optional[str] = None,
events: Optional[Sequence['outputs.SyncMigrationDatabaseErrorEventResponse']] = None):
"""
:param str id: Result identifier
:param str result_type: Result type
Expected value is 'DatabaseLevelErrorOutput'.
:param str error_message: Error message
:param Sequence['SyncMigrationDatabaseErrorEventResponse'] events: List of error events.
"""
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "result_type", 'DatabaseLevelErrorOutput')
if error_message is not None:
pulumi.set(__self__, "error_message", error_message)
if events is not None:
pulumi.set(__self__, "events", events)
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'DatabaseLevelErrorOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="errorMessage")
def error_message(self) -> Optional[str]:
"""
Error message
"""
return pulumi.get(self, "error_message")
@property
@pulumi.getter
def events(self) -> Optional[Sequence['outputs.SyncMigrationDatabaseErrorEventResponse']]:
"""
List of error events.
"""
return pulumi.get(self, "events")
@pulumi.output_type
class MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "appliedChanges":
suggest = "applied_changes"
elif key == "cdcDeleteCounter":
suggest = "cdc_delete_counter"
elif key == "cdcInsertCounter":
suggest = "cdc_insert_counter"
elif key == "cdcUpdateCounter":
suggest = "cdc_update_counter"
elif key == "databaseName":
suggest = "database_name"
elif key == "endedOn":
suggest = "ended_on"
elif key == "fullLoadCompletedTables":
suggest = "full_load_completed_tables"
elif key == "fullLoadErroredTables":
suggest = "full_load_errored_tables"
elif key == "fullLoadLoadingTables":
suggest = "full_load_loading_tables"
elif key == "fullLoadQueuedTables":
suggest = "full_load_queued_tables"
elif key == "incomingChanges":
suggest = "incoming_changes"
elif key == "initializationCompleted":
suggest = "initialization_completed"
elif key == "migrationState":
suggest = "migration_state"
elif key == "resultType":
suggest = "result_type"
elif key == "startedOn":
suggest = "started_on"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
applied_changes: float,
cdc_delete_counter: float,
cdc_insert_counter: float,
cdc_update_counter: float,
database_name: str,
ended_on: str,
full_load_completed_tables: float,
full_load_errored_tables: float,
full_load_loading_tables: float,
full_load_queued_tables: float,
id: str,
incoming_changes: float,
initialization_completed: bool,
latency: float,
migration_state: str,
result_type: str,
started_on: str):
"""
:param float applied_changes: Number of applied changes
        :param float cdc_delete_counter: Number of CDC deletes
        :param float cdc_insert_counter: Number of CDC inserts
        :param float cdc_update_counter: Number of CDC updates
:param str database_name: Name of the database
:param str ended_on: Migration end time
:param float full_load_completed_tables: Number of tables completed in full load
:param float full_load_errored_tables: Number of tables errored in full load
:param float full_load_loading_tables: Number of tables loading in full load
:param float full_load_queued_tables: Number of tables queued in full load
:param str id: Result identifier
:param float incoming_changes: Number of incoming changes
:param bool initialization_completed: Indicates if initial load (full load) has been completed
:param float latency: CDC apply latency
:param str migration_state: Migration state that this database is in
:param str result_type: Result type
Expected value is 'DatabaseLevelOutput'.
:param str started_on: Migration start time
"""
pulumi.set(__self__, "applied_changes", applied_changes)
pulumi.set(__self__, "cdc_delete_counter", cdc_delete_counter)
pulumi.set(__self__, "cdc_insert_counter", cdc_insert_counter)
pulumi.set(__self__, "cdc_update_counter", cdc_update_counter)
pulumi.set(__self__, "database_name", database_name)
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "full_load_completed_tables", full_load_completed_tables)
pulumi.set(__self__, "full_load_errored_tables", full_load_errored_tables)
pulumi.set(__self__, "full_load_loading_tables", full_load_loading_tables)
pulumi.set(__self__, "full_load_queued_tables", full_load_queued_tables)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "incoming_changes", incoming_changes)
pulumi.set(__self__, "initialization_completed", initialization_completed)
pulumi.set(__self__, "latency", latency)
pulumi.set(__self__, "migration_state", migration_state)
pulumi.set(__self__, "result_type", 'DatabaseLevelOutput')
pulumi.set(__self__, "started_on", started_on)
@property
@pulumi.getter(name="appliedChanges")
def applied_changes(self) -> float:
"""
Number of applied changes
"""
return pulumi.get(self, "applied_changes")
@property
@pulumi.getter(name="cdcDeleteCounter")
def cdc_delete_counter(self) -> float:
"""
        Number of CDC deletes
"""
return pulumi.get(self, "cdc_delete_counter")
@property
@pulumi.getter(name="cdcInsertCounter")
def cdc_insert_counter(self) -> float:
"""
        Number of CDC inserts
"""
return pulumi.get(self, "cdc_insert_counter")
@property
@pulumi.getter(name="cdcUpdateCounter")
def cdc_update_counter(self) -> float:
"""
        Number of CDC updates
"""
return pulumi.get(self, "cdc_update_counter")
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> str:
"""
Name of the database
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter(name="fullLoadCompletedTables")
def full_load_completed_tables(self) -> float:
"""
Number of tables completed in full load
"""
return pulumi.get(self, "full_load_completed_tables")
@property
@pulumi.getter(name="fullLoadErroredTables")
def full_load_errored_tables(self) -> float:
"""
Number of tables errored in full load
"""
return pulumi.get(self, "full_load_errored_tables")
@property
@pulumi.getter(name="fullLoadLoadingTables")
def full_load_loading_tables(self) -> float:
"""
Number of tables loading in full load
"""
return pulumi.get(self, "full_load_loading_tables")
@property
@pulumi.getter(name="fullLoadQueuedTables")
def full_load_queued_tables(self) -> float:
"""
Number of tables queued in full load
"""
return pulumi.get(self, "full_load_queued_tables")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="incomingChanges")
def incoming_changes(self) -> float:
"""
Number of incoming changes
"""
return pulumi.get(self, "incoming_changes")
@property
@pulumi.getter(name="initializationCompleted")
def initialization_completed(self) -> bool:
"""
Indicates if initial load (full load) has been completed
"""
return pulumi.get(self, "initialization_completed")
@property
@pulumi.getter
def latency(self) -> float:
"""
CDC apply latency
"""
return pulumi.get(self, "latency")
@property
@pulumi.getter(name="migrationState")
def migration_state(self) -> str:
"""
Migration state that this database is in
"""
return pulumi.get(self, "migration_state")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'DatabaseLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@pulumi.output_type
class MigrateSqlServerSqlDbSyncTaskOutputErrorResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "resultType":
suggest = "result_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbSyncTaskOutputErrorResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbSyncTaskOutputErrorResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbSyncTaskOutputErrorResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
error: 'outputs.ReportableExceptionResponse',
id: str,
result_type: str):
"""
:param 'ReportableExceptionResponse' error: Migration error
:param str id: Result identifier
:param str result_type: Result type
Expected value is 'ErrorOutput'.
"""
pulumi.set(__self__, "error", error)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "result_type", 'ErrorOutput')
@property
@pulumi.getter
def error(self) -> 'outputs.ReportableExceptionResponse':
"""
Migration error
"""
return pulumi.get(self, "error")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'ErrorOutput'.
"""
return pulumi.get(self, "result_type")
@pulumi.output_type
class MigrateSqlServerSqlDbSyncTaskOutputMigrationLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "databaseCount":
suggest = "database_count"
elif key == "endedOn":
suggest = "ended_on"
elif key == "resultType":
suggest = "result_type"
elif key == "sourceServer":
suggest = "source_server"
elif key == "sourceServerVersion":
suggest = "source_server_version"
elif key == "startedOn":
suggest = "started_on"
elif key == "targetServer":
suggest = "target_server"
elif key == "targetServerVersion":
suggest = "target_server_version"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbSyncTaskOutputMigrationLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbSyncTaskOutputMigrationLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbSyncTaskOutputMigrationLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
database_count: int,
ended_on: str,
id: str,
result_type: str,
source_server: str,
source_server_version: str,
started_on: str,
target_server: str,
target_server_version: str):
"""
:param int database_count: Count of databases
:param str ended_on: Migration end time
:param str id: Result identifier
:param str result_type: Result type
Expected value is 'MigrationLevelOutput'.
:param str source_server: Source server name
:param str source_server_version: Source server version
:param str started_on: Migration start time
:param str target_server: Target server name
:param str target_server_version: Target server version
"""
pulumi.set(__self__, "database_count", database_count)
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "result_type", 'MigrationLevelOutput')
pulumi.set(__self__, "source_server", source_server)
pulumi.set(__self__, "source_server_version", source_server_version)
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "target_server", target_server)
pulumi.set(__self__, "target_server_version", target_server_version)
@property
@pulumi.getter(name="databaseCount")
def database_count(self) -> int:
"""
Count of databases
"""
return pulumi.get(self, "database_count")
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'MigrationLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="sourceServer")
def source_server(self) -> str:
"""
Source server name
"""
return pulumi.get(self, "source_server")
@property
@pulumi.getter(name="sourceServerVersion")
def source_server_version(self) -> str:
"""
Source server version
"""
return pulumi.get(self, "source_server_version")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter(name="targetServer")
def target_server(self) -> str:
"""
Target server name
"""
return pulumi.get(self, "target_server")
@property
@pulumi.getter(name="targetServerVersion")
def target_server_version(self) -> str:
"""
Target server version
"""
return pulumi.get(self, "target_server_version")
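

# --- Illustrative sketch (not part of the generated SDK) --------------------
# A hypothetical one-line rendering of the source/target pair reported by a
# migration-level output, using only the fields defined above:
def _example_server_versions(lvl: 'MigrateSqlServerSqlDbSyncTaskOutputMigrationLevelResponse') -> str:
    """Sketch only: summarize source and target server versions."""
    return (f"{lvl.source_server} ({lvl.source_server_version}) -> "
            f"{lvl.target_server} ({lvl.target_server_version})")
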
@pulumi.output_type
class MigrateSqlServerSqlDbSyncTaskOutputTableLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "cdcDeleteCounter":
suggest = "cdc_delete_counter"
elif key == "cdcInsertCounter":
suggest = "cdc_insert_counter"
elif key == "cdcUpdateCounter":
suggest = "cdc_update_counter"
elif key == "dataErrorsCounter":
suggest = "data_errors_counter"
elif key == "databaseName":
suggest = "database_name"
elif key == "fullLoadEndedOn":
suggest = "full_load_ended_on"
elif key == "fullLoadEstFinishTime":
suggest = "full_load_est_finish_time"
elif key == "fullLoadStartedOn":
suggest = "full_load_started_on"
elif key == "fullLoadTotalRows":
suggest = "full_load_total_rows"
elif key == "lastModifiedTime":
suggest = "last_modified_time"
elif key == "resultType":
suggest = "result_type"
elif key == "tableName":
suggest = "table_name"
elif key == "totalChangesApplied":
suggest = "total_changes_applied"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbSyncTaskOutputTableLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbSyncTaskOutputTableLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbSyncTaskOutputTableLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cdc_delete_counter: float,
cdc_insert_counter: float,
cdc_update_counter: float,
data_errors_counter: float,
database_name: str,
full_load_ended_on: str,
full_load_est_finish_time: str,
full_load_started_on: str,
full_load_total_rows: float,
id: str,
last_modified_time: str,
result_type: str,
state: str,
table_name: str,
total_changes_applied: float):
"""
:param float cdc_delete_counter: Number of applied deletes
:param float cdc_insert_counter: Number of applied inserts
:param float cdc_update_counter: Number of applied updates
        :param float data_errors_counter: Number of data errors that occurred
:param str database_name: Name of the database
:param str full_load_ended_on: Full load end time
        :param str full_load_est_finish_time: Estimated time to finish full load
:param str full_load_started_on: Full load start time
:param float full_load_total_rows: Number of rows applied in full load
:param str id: Result identifier
:param str last_modified_time: Last modified time on target
:param str result_type: Result type
Expected value is 'TableLevelOutput'.
:param str state: Current state of the table migration
:param str table_name: Name of the table
:param float total_changes_applied: Total number of applied changes
"""
pulumi.set(__self__, "cdc_delete_counter", cdc_delete_counter)
pulumi.set(__self__, "cdc_insert_counter", cdc_insert_counter)
pulumi.set(__self__, "cdc_update_counter", cdc_update_counter)
pulumi.set(__self__, "data_errors_counter", data_errors_counter)
pulumi.set(__self__, "database_name", database_name)
pulumi.set(__self__, "full_load_ended_on", full_load_ended_on)
pulumi.set(__self__, "full_load_est_finish_time", full_load_est_finish_time)
pulumi.set(__self__, "full_load_started_on", full_load_started_on)
pulumi.set(__self__, "full_load_total_rows", full_load_total_rows)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "last_modified_time", last_modified_time)
pulumi.set(__self__, "result_type", 'TableLevelOutput')
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "table_name", table_name)
pulumi.set(__self__, "total_changes_applied", total_changes_applied)
@property
@pulumi.getter(name="cdcDeleteCounter")
def cdc_delete_counter(self) -> float:
"""
Number of applied deletes
"""
return pulumi.get(self, "cdc_delete_counter")
@property
@pulumi.getter(name="cdcInsertCounter")
def cdc_insert_counter(self) -> float:
"""
Number of applied inserts
"""
return pulumi.get(self, "cdc_insert_counter")
@property
@pulumi.getter(name="cdcUpdateCounter")
def cdc_update_counter(self) -> float:
"""
Number of applied updates
"""
return pulumi.get(self, "cdc_update_counter")
@property
@pulumi.getter(name="dataErrorsCounter")
def data_errors_counter(self) -> float:
"""
        Number of data errors that occurred
"""
return pulumi.get(self, "data_errors_counter")
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> str:
"""
Name of the database
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter(name="fullLoadEndedOn")
def full_load_ended_on(self) -> str:
"""
Full load end time
"""
return pulumi.get(self, "full_load_ended_on")
@property
@pulumi.getter(name="fullLoadEstFinishTime")
def full_load_est_finish_time(self) -> str:
"""
        Estimated time to finish full load
"""
return pulumi.get(self, "full_load_est_finish_time")
@property
@pulumi.getter(name="fullLoadStartedOn")
def full_load_started_on(self) -> str:
"""
Full load start time
"""
return pulumi.get(self, "full_load_started_on")
@property
@pulumi.getter(name="fullLoadTotalRows")
def full_load_total_rows(self) -> float:
"""
Number of rows applied in full load
"""
return pulumi.get(self, "full_load_total_rows")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="lastModifiedTime")
def last_modified_time(self) -> str:
"""
Last modified time on target
"""
return pulumi.get(self, "last_modified_time")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'TableLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter
def state(self) -> str:
"""
Current state of the table migration
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="tableName")
def table_name(self) -> str:
"""
Name of the table
"""
return pulumi.get(self, "table_name")
@property
@pulumi.getter(name="totalChangesApplied")
def total_changes_applied(self) -> float:
"""
Total number of applied changes
"""
return pulumi.get(self, "total_changes_applied")
@pulumi.output_type
class MigrateSqlServerSqlDbSyncTaskPropertiesResponse(dict):
"""
Properties for the task that migrates on-prem SQL Server databases to Azure SQL Database for online migrations
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbSyncTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbSyncTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbSyncTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence[Any],
state: str,
task_type: str,
input: Optional['outputs.MigrateSqlServerSqlDbSyncTaskInputResponse'] = None):
"""
Properties for the task that migrates on-prem SQL Server databases to Azure SQL Database for online migrations
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence[Union['MigrateSqlServerSqlDbSyncTaskOutputDatabaseErrorResponse', 'MigrateSqlServerSqlDbSyncTaskOutputDatabaseLevelResponse', 'MigrateSqlServerSqlDbSyncTaskOutputErrorResponse', 'MigrateSqlServerSqlDbSyncTaskOutputMigrationLevelResponse', 'MigrateSqlServerSqlDbSyncTaskOutputTableLevelResponse']] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'Migrate.SqlServer.AzureSqlDb.Sync'.
:param 'MigrateSqlServerSqlDbSyncTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'Migrate.SqlServer.AzureSqlDb.Sync')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence[Any]:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'Migrate.SqlServer.AzureSqlDb.Sync'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.MigrateSqlServerSqlDbSyncTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class MigrateSqlServerSqlDbTaskInputResponse(dict):
"""
Input for the task that migrates on-prem SQL Server databases to Azure SQL Database
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "selectedDatabases":
suggest = "selected_databases"
elif key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
elif key == "validationOptions":
suggest = "validation_options"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
selected_databases: Sequence['outputs.MigrateSqlServerSqlDbDatabaseInputResponse'],
source_connection_info: 'outputs.SqlConnectionInfoResponse',
target_connection_info: 'outputs.SqlConnectionInfoResponse',
validation_options: Optional['outputs.MigrationValidationOptionsResponse'] = None):
"""
Input for the task that migrates on-prem SQL Server databases to Azure SQL Database
:param Sequence['MigrateSqlServerSqlDbDatabaseInputResponse'] selected_databases: Databases to migrate
:param 'SqlConnectionInfoResponse' source_connection_info: Information for connecting to source
:param 'SqlConnectionInfoResponse' target_connection_info: Information for connecting to target
        :param 'MigrationValidationOptionsResponse' validation_options: Options for enabling various post-migration validations. Available options:
               1.) Data Integrity Check: performs a checksum-based comparison of the source and target tables after the migration to ensure the correctness of the data.
               2.) Schema Validation: performs a thorough schema comparison between the source and target tables and provides a list of differences between the source and target database.
               3.) Query Analysis: executes a set of queries, picked up automatically from either the Query Plan Cache or the Query Store, and compares the execution time between the source and target database.
"""
pulumi.set(__self__, "selected_databases", selected_databases)
pulumi.set(__self__, "source_connection_info", source_connection_info)
pulumi.set(__self__, "target_connection_info", target_connection_info)
if validation_options is not None:
pulumi.set(__self__, "validation_options", validation_options)
@property
@pulumi.getter(name="selectedDatabases")
def selected_databases(self) -> Sequence['outputs.MigrateSqlServerSqlDbDatabaseInputResponse']:
"""
Databases to migrate
"""
return pulumi.get(self, "selected_databases")
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Information for connecting to source
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Information for connecting to target
"""
return pulumi.get(self, "target_connection_info")
@property
@pulumi.getter(name="validationOptions")
def validation_options(self) -> Optional['outputs.MigrationValidationOptionsResponse']:
"""
        Options for enabling various post-migration validations. Available options:
        1.) Data Integrity Check: performs a checksum-based comparison of the source and target tables after the migration to ensure the correctness of the data.
        2.) Schema Validation: performs a thorough schema comparison between the source and target tables and provides a list of differences between the source and target database.
        3.) Query Analysis: executes a set of queries, picked up automatically from either the Query Plan Cache or the Query Store, and compares the execution time between the source and target database.
"""
return pulumi.get(self, "validation_options")
@pulumi.output_type
class MigrateSqlServerSqlDbTaskOutputDatabaseLevelResponse(dict):
"""
Database level result for Sql Server to Azure Sql DB migration.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "databaseName":
suggest = "database_name"
elif key == "endedOn":
suggest = "ended_on"
elif key == "errorCount":
suggest = "error_count"
elif key == "errorPrefix":
suggest = "error_prefix"
elif key == "exceptionsAndWarnings":
suggest = "exceptions_and_warnings"
elif key == "numberOfObjects":
suggest = "number_of_objects"
elif key == "numberOfObjectsCompleted":
suggest = "number_of_objects_completed"
elif key == "objectSummary":
suggest = "object_summary"
elif key == "resultPrefix":
suggest = "result_prefix"
elif key == "resultType":
suggest = "result_type"
elif key == "startedOn":
suggest = "started_on"
elif key == "statusMessage":
suggest = "status_message"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbTaskOutputDatabaseLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbTaskOutputDatabaseLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbTaskOutputDatabaseLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
database_name: str,
ended_on: str,
error_count: float,
error_prefix: str,
exceptions_and_warnings: Sequence['outputs.ReportableExceptionResponse'],
id: str,
message: str,
number_of_objects: float,
number_of_objects_completed: float,
object_summary: Mapping[str, 'outputs.DataItemMigrationSummaryResultResponse'],
result_prefix: str,
result_type: str,
stage: str,
started_on: str,
state: str,
status_message: str):
"""
Database level result for Sql Server to Azure Sql DB migration.
:param str database_name: Name of the item
:param str ended_on: Migration end time
:param float error_count: Number of database/object errors.
:param str error_prefix: Wildcard string prefix to use for querying all errors of the item
:param Sequence['ReportableExceptionResponse'] exceptions_and_warnings: Migration exceptions and warnings.
:param str id: Result identifier
:param str message: Migration progress message
:param float number_of_objects: Number of objects
:param float number_of_objects_completed: Number of successfully completed objects
:param Mapping[str, 'DataItemMigrationSummaryResultResponse'] object_summary: Summary of object results in the migration
        :param str result_prefix: Wildcard string prefix to use for querying all sub-item results of the item
:param str result_type: Result type
Expected value is 'DatabaseLevelOutput'.
:param str stage: Migration stage that this database is in
:param str started_on: Migration start time
:param str state: Current state of migration
:param str status_message: Status message
"""
pulumi.set(__self__, "database_name", database_name)
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "error_count", error_count)
pulumi.set(__self__, "error_prefix", error_prefix)
pulumi.set(__self__, "exceptions_and_warnings", exceptions_and_warnings)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "message", message)
pulumi.set(__self__, "number_of_objects", number_of_objects)
pulumi.set(__self__, "number_of_objects_completed", number_of_objects_completed)
pulumi.set(__self__, "object_summary", object_summary)
pulumi.set(__self__, "result_prefix", result_prefix)
pulumi.set(__self__, "result_type", 'DatabaseLevelOutput')
pulumi.set(__self__, "stage", stage)
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "status_message", status_message)
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> str:
"""
Name of the item
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter(name="errorCount")
def error_count(self) -> float:
"""
Number of database/object errors.
"""
return pulumi.get(self, "error_count")
@property
@pulumi.getter(name="errorPrefix")
def error_prefix(self) -> str:
"""
Wildcard string prefix to use for querying all errors of the item
"""
return pulumi.get(self, "error_prefix")
@property
@pulumi.getter(name="exceptionsAndWarnings")
def exceptions_and_warnings(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Migration exceptions and warnings.
"""
return pulumi.get(self, "exceptions_and_warnings")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def message(self) -> str:
"""
Migration progress message
"""
return pulumi.get(self, "message")
@property
@pulumi.getter(name="numberOfObjects")
def number_of_objects(self) -> float:
"""
Number of objects
"""
return pulumi.get(self, "number_of_objects")
@property
@pulumi.getter(name="numberOfObjectsCompleted")
def number_of_objects_completed(self) -> float:
"""
Number of successfully completed objects
"""
return pulumi.get(self, "number_of_objects_completed")
@property
@pulumi.getter(name="objectSummary")
def object_summary(self) -> Mapping[str, 'outputs.DataItemMigrationSummaryResultResponse']:
"""
Summary of object results in the migration
"""
return pulumi.get(self, "object_summary")
@property
@pulumi.getter(name="resultPrefix")
def result_prefix(self) -> str:
"""
        Wildcard string prefix to use for querying all sub-item results of the item
"""
return pulumi.get(self, "result_prefix")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'DatabaseLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter
def stage(self) -> str:
"""
Migration stage that this database is in
"""
return pulumi.get(self, "stage")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter
def state(self) -> str:
"""
Current state of migration
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="statusMessage")
def status_message(self) -> str:
"""
Status message
"""
return pulumi.get(self, "status_message")
@pulumi.output_type
class MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResultResponse(dict):
"""
Database validation result for Sql Server to Azure Sql DB migration.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "dataIntegrityValidationResult":
suggest = "data_integrity_validation_result"
elif key == "endedOn":
suggest = "ended_on"
elif key == "migrationId":
suggest = "migration_id"
elif key == "queryAnalysisValidationResult":
suggest = "query_analysis_validation_result"
elif key == "resultType":
suggest = "result_type"
elif key == "schemaValidationResult":
suggest = "schema_validation_result"
elif key == "sourceDatabaseName":
suggest = "source_database_name"
elif key == "startedOn":
suggest = "started_on"
elif key == "targetDatabaseName":
suggest = "target_database_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResultResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResultResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResultResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
data_integrity_validation_result: 'outputs.DataIntegrityValidationResultResponse',
ended_on: str,
id: str,
migration_id: str,
query_analysis_validation_result: 'outputs.QueryAnalysisValidationResultResponse',
result_type: str,
schema_validation_result: 'outputs.SchemaComparisonValidationResultResponse',
source_database_name: str,
started_on: str,
status: str,
target_database_name: str):
"""
Database validation result for Sql Server to Azure Sql DB migration.
:param 'DataIntegrityValidationResultResponse' data_integrity_validation_result: Provides data integrity validation result between the source and target tables that are migrated.
:param str ended_on: Validation end time
:param str id: Result identifier
:param str migration_id: Migration Identifier
:param 'QueryAnalysisValidationResultResponse' query_analysis_validation_result: Results of some of the query execution result between source and target database
:param str result_type: Result type
Expected value is 'MigrationDatabaseLevelValidationOutput'.
:param 'SchemaComparisonValidationResultResponse' schema_validation_result: Provides schema comparison result between source and target database
:param str source_database_name: Name of the source database
:param str started_on: Validation start time
:param str status: Current status of validation at the database level
:param str target_database_name: Name of the target database
"""
pulumi.set(__self__, "data_integrity_validation_result", data_integrity_validation_result)
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "migration_id", migration_id)
pulumi.set(__self__, "query_analysis_validation_result", query_analysis_validation_result)
pulumi.set(__self__, "result_type", 'MigrationDatabaseLevelValidationOutput')
pulumi.set(__self__, "schema_validation_result", schema_validation_result)
pulumi.set(__self__, "source_database_name", source_database_name)
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "status", status)
pulumi.set(__self__, "target_database_name", target_database_name)
@property
@pulumi.getter(name="dataIntegrityValidationResult")
def data_integrity_validation_result(self) -> 'outputs.DataIntegrityValidationResultResponse':
"""
Provides data integrity validation result between the source and target tables that are migrated.
"""
return pulumi.get(self, "data_integrity_validation_result")
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Validation end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="migrationId")
def migration_id(self) -> str:
"""
Migration Identifier
"""
return pulumi.get(self, "migration_id")
@property
@pulumi.getter(name="queryAnalysisValidationResult")
def query_analysis_validation_result(self) -> 'outputs.QueryAnalysisValidationResultResponse':
"""
Results of some of the query execution result between source and target database
"""
return pulumi.get(self, "query_analysis_validation_result")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'MigrationDatabaseLevelValidationOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="schemaValidationResult")
def schema_validation_result(self) -> 'outputs.SchemaComparisonValidationResultResponse':
"""
Provides schema comparison result between source and target database
"""
return pulumi.get(self, "schema_validation_result")
@property
@pulumi.getter(name="sourceDatabaseName")
def source_database_name(self) -> str:
"""
Name of the source database
"""
return pulumi.get(self, "source_database_name")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Validation start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter
def status(self) -> str:
"""
Current status of validation at the database level
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="targetDatabaseName")
def target_database_name(self) -> str:
"""
Name of the target database
"""
return pulumi.get(self, "target_database_name")
@pulumi.output_type
class MigrateSqlServerSqlDbTaskOutputErrorResponse(dict):
"""
Task errors for Sql Server to Azure Sql DB migration.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "resultType":
suggest = "result_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbTaskOutputErrorResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbTaskOutputErrorResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbTaskOutputErrorResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
error: 'outputs.ReportableExceptionResponse',
id: str,
result_type: str):
"""
Task errors for Sql Server to Azure Sql DB migration.
:param 'ReportableExceptionResponse' error: Migration error
:param str id: Result identifier
:param str result_type: Result type
Expected value is 'ErrorOutput'.
"""
pulumi.set(__self__, "error", error)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "result_type", 'ErrorOutput')
@property
@pulumi.getter
def error(self) -> 'outputs.ReportableExceptionResponse':
"""
Migration error
"""
return pulumi.get(self, "error")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'ErrorOutput'.
"""
return pulumi.get(self, "result_type")
@pulumi.output_type
class MigrateSqlServerSqlDbTaskOutputMigrationLevelResponse(dict):
"""
Migration level result for Sql server to Azure Sql DB migration.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "databaseSummary":
suggest = "database_summary"
elif key == "durationInSeconds":
suggest = "duration_in_seconds"
elif key == "endedOn":
suggest = "ended_on"
elif key == "exceptionsAndWarnings":
suggest = "exceptions_and_warnings"
elif key == "migrationReport":
suggest = "migration_report"
elif key == "resultType":
suggest = "result_type"
elif key == "sourceServerBrandVersion":
suggest = "source_server_brand_version"
elif key == "sourceServerVersion":
suggest = "source_server_version"
elif key == "startedOn":
suggest = "started_on"
elif key == "statusMessage":
suggest = "status_message"
elif key == "targetServerBrandVersion":
suggest = "target_server_brand_version"
elif key == "targetServerVersion":
suggest = "target_server_version"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbTaskOutputMigrationLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbTaskOutputMigrationLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbTaskOutputMigrationLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
database_summary: Mapping[str, 'outputs.DatabaseSummaryResultResponse'],
databases: Mapping[str, str],
duration_in_seconds: float,
ended_on: str,
exceptions_and_warnings: Sequence['outputs.ReportableExceptionResponse'],
id: str,
message: str,
migration_report: 'outputs.MigrationReportResultResponse',
result_type: str,
source_server_brand_version: str,
source_server_version: str,
started_on: str,
status: str,
status_message: str,
target_server_brand_version: str,
target_server_version: str):
"""
Migration level result for Sql server to Azure Sql DB migration.
:param Mapping[str, 'DatabaseSummaryResultResponse'] database_summary: Summary of database results in the migration
:param Mapping[str, str] databases: Selected databases as a map from database name to database id
:param float duration_in_seconds: Duration of task execution in seconds.
:param str ended_on: Migration end time
:param Sequence['ReportableExceptionResponse'] exceptions_and_warnings: Migration exceptions and warnings.
:param str id: Result identifier
:param str message: Migration progress message
        :param 'MigrationReportResultResponse' migration_report: Migration report result; provides a unique URL for downloading your migration report.
:param str result_type: Result type
Expected value is 'MigrationLevelOutput'.
:param str source_server_brand_version: Source server brand version
:param str source_server_version: Source server version
:param str started_on: Migration start time
:param str status: Current status of migration
:param str status_message: Migration status message
:param str target_server_brand_version: Target server brand version
:param str target_server_version: Target server version
"""
pulumi.set(__self__, "database_summary", database_summary)
pulumi.set(__self__, "databases", databases)
pulumi.set(__self__, "duration_in_seconds", duration_in_seconds)
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "exceptions_and_warnings", exceptions_and_warnings)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "message", message)
pulumi.set(__self__, "migration_report", migration_report)
pulumi.set(__self__, "result_type", 'MigrationLevelOutput')
pulumi.set(__self__, "source_server_brand_version", source_server_brand_version)
pulumi.set(__self__, "source_server_version", source_server_version)
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "status", status)
pulumi.set(__self__, "status_message", status_message)
pulumi.set(__self__, "target_server_brand_version", target_server_brand_version)
pulumi.set(__self__, "target_server_version", target_server_version)
@property
@pulumi.getter(name="databaseSummary")
def database_summary(self) -> Mapping[str, 'outputs.DatabaseSummaryResultResponse']:
"""
Summary of database results in the migration
"""
return pulumi.get(self, "database_summary")
@property
@pulumi.getter
def databases(self) -> Mapping[str, str]:
"""
Selected databases as a map from database name to database id
"""
return pulumi.get(self, "databases")
@property
@pulumi.getter(name="durationInSeconds")
def duration_in_seconds(self) -> float:
"""
Duration of task execution in seconds.
"""
return pulumi.get(self, "duration_in_seconds")
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter(name="exceptionsAndWarnings")
def exceptions_and_warnings(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Migration exceptions and warnings.
"""
return pulumi.get(self, "exceptions_and_warnings")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def message(self) -> str:
"""
Migration progress message
"""
return pulumi.get(self, "message")
@property
@pulumi.getter(name="migrationReport")
def migration_report(self) -> 'outputs.MigrationReportResultResponse':
"""
        Migration report result; provides a unique URL for downloading your migration report.
"""
return pulumi.get(self, "migration_report")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'MigrationLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="sourceServerBrandVersion")
def source_server_brand_version(self) -> str:
"""
Source server brand version
"""
return pulumi.get(self, "source_server_brand_version")
@property
@pulumi.getter(name="sourceServerVersion")
def source_server_version(self) -> str:
"""
Source server version
"""
return pulumi.get(self, "source_server_version")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter
def status(self) -> str:
"""
Current status of migration
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="statusMessage")
def status_message(self) -> str:
"""
Migration status message
"""
return pulumi.get(self, "status_message")
@property
@pulumi.getter(name="targetServerBrandVersion")
def target_server_brand_version(self) -> str:
"""
Target server brand version
"""
return pulumi.get(self, "target_server_brand_version")
@property
@pulumi.getter(name="targetServerVersion")
def target_server_version(self) -> str:
"""
Target server version
"""
return pulumi.get(self, "target_server_version")
@pulumi.output_type
class MigrateSqlServerSqlDbTaskOutputTableLevelResponse(dict):
"""
Table level result for Sql Server to Azure Sql DB migration.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "endedOn":
suggest = "ended_on"
elif key == "errorPrefix":
suggest = "error_prefix"
elif key == "itemsCompletedCount":
suggest = "items_completed_count"
elif key == "itemsCount":
suggest = "items_count"
elif key == "objectName":
suggest = "object_name"
elif key == "resultPrefix":
suggest = "result_prefix"
elif key == "resultType":
suggest = "result_type"
elif key == "startedOn":
suggest = "started_on"
elif key == "statusMessage":
suggest = "status_message"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbTaskOutputTableLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbTaskOutputTableLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbTaskOutputTableLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ended_on: str,
error_prefix: str,
id: str,
items_completed_count: float,
items_count: float,
object_name: str,
result_prefix: str,
result_type: str,
started_on: str,
state: str,
status_message: str):
"""
Table level result for Sql Server to Azure Sql DB migration.
:param str ended_on: Migration end time
:param str error_prefix: Wildcard string prefix to use for querying all errors of the item
:param str id: Result identifier
:param float items_completed_count: Number of successfully completed items
:param float items_count: Number of items
:param str object_name: Name of the item
        :param str result_prefix: Wildcard string prefix to use for querying all sub-item results of the item
:param str result_type: Result type
Expected value is 'TableLevelOutput'.
:param str started_on: Migration start time
:param str state: Current state of migration
:param str status_message: Status message
"""
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "error_prefix", error_prefix)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "items_completed_count", items_completed_count)
pulumi.set(__self__, "items_count", items_count)
pulumi.set(__self__, "object_name", object_name)
pulumi.set(__self__, "result_prefix", result_prefix)
pulumi.set(__self__, "result_type", 'TableLevelOutput')
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "status_message", status_message)
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter(name="errorPrefix")
def error_prefix(self) -> str:
"""
Wildcard string prefix to use for querying all errors of the item
"""
return pulumi.get(self, "error_prefix")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="itemsCompletedCount")
def items_completed_count(self) -> float:
"""
Number of successfully completed items
"""
return pulumi.get(self, "items_completed_count")
@property
@pulumi.getter(name="itemsCount")
def items_count(self) -> float:
"""
Number of items
"""
return pulumi.get(self, "items_count")
@property
@pulumi.getter(name="objectName")
def object_name(self) -> str:
"""
Name of the item
"""
return pulumi.get(self, "object_name")
@property
@pulumi.getter(name="resultPrefix")
def result_prefix(self) -> str:
"""
        Wildcard string prefix to use for querying all sub-item results of the item
"""
return pulumi.get(self, "result_prefix")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'TableLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter
def state(self) -> str:
"""
Current state of migration
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="statusMessage")
def status_message(self) -> str:
"""
Status message
"""
return pulumi.get(self, "status_message")
@pulumi.output_type
class MigrateSqlServerSqlDbTaskOutputValidationResultResponse(dict):
"""
Validation result for Sql Server to Azure Sql DB migration.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "migrationId":
suggest = "migration_id"
elif key == "resultType":
suggest = "result_type"
elif key == "summaryResults":
suggest = "summary_results"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbTaskOutputValidationResultResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbTaskOutputValidationResultResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbTaskOutputValidationResultResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
id: str,
migration_id: str,
result_type: str,
status: str,
summary_results: Optional[Mapping[str, 'outputs.MigrationValidationDatabaseSummaryResultResponse']] = None):
"""
Validation result for Sql Server to Azure Sql DB migration.
:param str id: Result identifier
:param str migration_id: Migration Identifier
:param str result_type: Result type
Expected value is 'MigrationValidationOutput'.
        :param str status: Current status of validation at the migration level. Statuses from the database-level validation results are aggregated here.
:param Mapping[str, 'MigrationValidationDatabaseSummaryResultResponse'] summary_results: Validation summary results for each database
"""
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "migration_id", migration_id)
pulumi.set(__self__, "result_type", 'MigrationValidationOutput')
pulumi.set(__self__, "status", status)
if summary_results is not None:
pulumi.set(__self__, "summary_results", summary_results)
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="migrationId")
def migration_id(self) -> str:
"""
Migration Identifier
"""
return pulumi.get(self, "migration_id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'MigrationValidationOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter
def status(self) -> str:
"""
        Current status of validation at the migration level. Statuses from the database-level validation results are aggregated here.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="summaryResults")
def summary_results(self) -> Optional[Mapping[str, 'outputs.MigrationValidationDatabaseSummaryResultResponse']]:
"""
Validation summary results for each database
"""
return pulumi.get(self, "summary_results")
@pulumi.output_type
class MigrateSqlServerSqlDbTaskPropertiesResponse(dict):
"""
Properties for the task that migrates on-prem SQL Server databases to Azure SQL Database
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlDbTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlDbTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlDbTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence[Any],
state: str,
task_type: str,
input: Optional['outputs.MigrateSqlServerSqlDbTaskInputResponse'] = None):
"""
Properties for the task that migrates on-prem SQL Server databases to Azure SQL Database
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence[Union['MigrateSqlServerSqlDbTaskOutputDatabaseLevelResponse', 'MigrateSqlServerSqlDbTaskOutputDatabaseLevelValidationResultResponse', 'MigrateSqlServerSqlDbTaskOutputErrorResponse', 'MigrateSqlServerSqlDbTaskOutputMigrationLevelResponse', 'MigrateSqlServerSqlDbTaskOutputTableLevelResponse', 'MigrateSqlServerSqlDbTaskOutputValidationResultResponse']] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'Migrate.SqlServer.SqlDb'.
:param 'MigrateSqlServerSqlDbTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'Migrate.SqlServer.SqlDb')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence[Any]:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'Migrate.SqlServer.SqlDb'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.MigrateSqlServerSqlDbTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class MigrateSqlServerSqlMIDatabaseInputResponse(dict):
"""
Database specific information for SQL to Azure SQL DB Managed Instance migration task inputs
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "restoreDatabaseName":
suggest = "restore_database_name"
elif key == "backupFilePaths":
suggest = "backup_file_paths"
elif key == "backupFileShare":
suggest = "backup_file_share"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlMIDatabaseInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlMIDatabaseInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlMIDatabaseInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
name: str,
restore_database_name: str,
backup_file_paths: Optional[Sequence[str]] = None,
backup_file_share: Optional['outputs.FileShareResponse'] = None):
"""
Database specific information for SQL to Azure SQL DB Managed Instance migration task inputs
:param str name: Name of the database
:param str restore_database_name: Name of the database at destination
:param Sequence[str] backup_file_paths: The list of backup files to be used in case of existing backups.
:param 'FileShareResponse' backup_file_share: Backup file share information for backing up this database.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "restore_database_name", restore_database_name)
if backup_file_paths is not None:
pulumi.set(__self__, "backup_file_paths", backup_file_paths)
if backup_file_share is not None:
pulumi.set(__self__, "backup_file_share", backup_file_share)
@property
@pulumi.getter
def name(self) -> str:
"""
Name of the database
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="restoreDatabaseName")
def restore_database_name(self) -> str:
"""
Name of the database at destination
"""
return pulumi.get(self, "restore_database_name")
@property
@pulumi.getter(name="backupFilePaths")
def backup_file_paths(self) -> Optional[Sequence[str]]:
"""
The list of backup files to be used in case of existing backups.
"""
return pulumi.get(self, "backup_file_paths")
@property
@pulumi.getter(name="backupFileShare")
def backup_file_share(self) -> Optional['outputs.FileShareResponse']:
"""
Backup file share information for backing up this database.
"""
return pulumi.get(self, "backup_file_share")
@pulumi.output_type
class MigrateSqlServerSqlMISyncTaskInputResponse(dict):
"""
Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online scenario.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "azureApp":
suggest = "azure_app"
elif key == "selectedDatabases":
suggest = "selected_databases"
elif key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "storageResourceId":
suggest = "storage_resource_id"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
elif key == "backupFileShare":
suggest = "backup_file_share"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlMISyncTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlMISyncTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlMISyncTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
azure_app: 'outputs.AzureActiveDirectoryAppResponse',
selected_databases: Sequence['outputs.MigrateSqlServerSqlMIDatabaseInputResponse'],
source_connection_info: 'outputs.SqlConnectionInfoResponse',
storage_resource_id: str,
target_connection_info: 'outputs.MiSqlConnectionInfoResponse',
backup_file_share: Optional['outputs.FileShareResponse'] = None):
"""
Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online scenario.
:param 'AzureActiveDirectoryAppResponse' azure_app: Azure Active Directory Application the DMS instance will use to connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage Account
:param Sequence['MigrateSqlServerSqlMIDatabaseInputResponse'] selected_databases: Databases to migrate
:param 'SqlConnectionInfoResponse' source_connection_info: Connection information for source SQL Server
:param str storage_resource_id: Fully qualified resourceId of storage
:param 'MiSqlConnectionInfoResponse' target_connection_info: Connection information for Azure SQL Database Managed Instance
:param 'FileShareResponse' backup_file_share: Backup file share information for all selected databases.
"""
pulumi.set(__self__, "azure_app", azure_app)
pulumi.set(__self__, "selected_databases", selected_databases)
pulumi.set(__self__, "source_connection_info", source_connection_info)
pulumi.set(__self__, "storage_resource_id", storage_resource_id)
pulumi.set(__self__, "target_connection_info", target_connection_info)
if backup_file_share is not None:
pulumi.set(__self__, "backup_file_share", backup_file_share)
@property
@pulumi.getter(name="azureApp")
def azure_app(self) -> 'outputs.AzureActiveDirectoryAppResponse':
"""
Azure Active Directory Application the DMS instance will use to connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage Account
"""
return pulumi.get(self, "azure_app")
@property
@pulumi.getter(name="selectedDatabases")
def selected_databases(self) -> Sequence['outputs.MigrateSqlServerSqlMIDatabaseInputResponse']:
"""
Databases to migrate
"""
return pulumi.get(self, "selected_databases")
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Connection information for source SQL Server
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="storageResourceId")
def storage_resource_id(self) -> str:
"""
Fully qualified resourceId of storage
"""
return pulumi.get(self, "storage_resource_id")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.MiSqlConnectionInfoResponse':
"""
Connection information for Azure SQL Database Managed Instance
"""
return pulumi.get(self, "target_connection_info")
@property
@pulumi.getter(name="backupFileShare")
def backup_file_share(self) -> Optional['outputs.FileShareResponse']:
"""
Backup file share information for all selected databases.
"""
return pulumi.get(self, "backup_file_share")
@pulumi.output_type
class MigrateSqlServerSqlMISyncTaskOutputDatabaseLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "activeBackupSets":
suggest = "active_backup_sets"
elif key == "containerName":
suggest = "container_name"
elif key == "endedOn":
suggest = "ended_on"
elif key == "errorPrefix":
suggest = "error_prefix"
elif key == "exceptionsAndWarnings":
suggest = "exceptions_and_warnings"
elif key == "fullBackupSetInfo":
suggest = "full_backup_set_info"
elif key == "isFullBackupRestored":
suggest = "is_full_backup_restored"
elif key == "lastRestoredBackupSetInfo":
suggest = "last_restored_backup_set_info"
elif key == "migrationState":
suggest = "migration_state"
elif key == "resultType":
suggest = "result_type"
elif key == "sourceDatabaseName":
suggest = "source_database_name"
elif key == "startedOn":
suggest = "started_on"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlMISyncTaskOutputDatabaseLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlMISyncTaskOutputDatabaseLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlMISyncTaskOutputDatabaseLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
active_backup_sets: Sequence['outputs.BackupSetInfoResponse'],
container_name: str,
ended_on: str,
error_prefix: str,
exceptions_and_warnings: Sequence['outputs.ReportableExceptionResponse'],
full_backup_set_info: 'outputs.BackupSetInfoResponse',
id: str,
is_full_backup_restored: bool,
last_restored_backup_set_info: 'outputs.BackupSetInfoResponse',
migration_state: str,
result_type: str,
source_database_name: str,
started_on: str):
"""
        :param Sequence['BackupSetInfoResponse'] active_backup_sets: Backup sets that are currently active (either being uploaded or being restored)
:param str container_name: Name of container created in the Azure Storage account where backups are copied to
:param str ended_on: Database migration end time
        :param str error_prefix: Prefix string to use for querying errors for this database
:param Sequence['ReportableExceptionResponse'] exceptions_and_warnings: Migration exceptions and warnings
:param 'BackupSetInfoResponse' full_backup_set_info: Details of full backup set
:param str id: Result identifier
        :param bool is_full_backup_restored: Whether the full backup has been applied to the target database or not
:param 'BackupSetInfoResponse' last_restored_backup_set_info: Last applied backup set information
:param str migration_state: Current state of database
:param str result_type: Result type
Expected value is 'DatabaseLevelOutput'.
:param str source_database_name: Name of the database
:param str started_on: Database migration start time
"""
pulumi.set(__self__, "active_backup_sets", active_backup_sets)
pulumi.set(__self__, "container_name", container_name)
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "error_prefix", error_prefix)
pulumi.set(__self__, "exceptions_and_warnings", exceptions_and_warnings)
pulumi.set(__self__, "full_backup_set_info", full_backup_set_info)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "is_full_backup_restored", is_full_backup_restored)
pulumi.set(__self__, "last_restored_backup_set_info", last_restored_backup_set_info)
pulumi.set(__self__, "migration_state", migration_state)
pulumi.set(__self__, "result_type", 'DatabaseLevelOutput')
pulumi.set(__self__, "source_database_name", source_database_name)
pulumi.set(__self__, "started_on", started_on)
@property
@pulumi.getter(name="activeBackupSets")
def active_backup_sets(self) -> Sequence['outputs.BackupSetInfoResponse']:
"""
        Backup sets that are currently active (either being uploaded or being restored)
"""
return pulumi.get(self, "active_backup_sets")
@property
@pulumi.getter(name="containerName")
def container_name(self) -> str:
"""
Name of container created in the Azure Storage account where backups are copied to
"""
return pulumi.get(self, "container_name")
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Database migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter(name="errorPrefix")
def error_prefix(self) -> str:
"""
        Prefix string to use for querying errors for this database
"""
return pulumi.get(self, "error_prefix")
@property
@pulumi.getter(name="exceptionsAndWarnings")
def exceptions_and_warnings(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Migration exceptions and warnings
"""
return pulumi.get(self, "exceptions_and_warnings")
@property
@pulumi.getter(name="fullBackupSetInfo")
def full_backup_set_info(self) -> 'outputs.BackupSetInfoResponse':
"""
Details of full backup set
"""
return pulumi.get(self, "full_backup_set_info")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="isFullBackupRestored")
def is_full_backup_restored(self) -> bool:
"""
        Whether the full backup has been applied to the target database or not
"""
return pulumi.get(self, "is_full_backup_restored")
@property
@pulumi.getter(name="lastRestoredBackupSetInfo")
def last_restored_backup_set_info(self) -> 'outputs.BackupSetInfoResponse':
"""
Last applied backup set information
"""
return pulumi.get(self, "last_restored_backup_set_info")
@property
@pulumi.getter(name="migrationState")
def migration_state(self) -> str:
"""
Current state of database
"""
return pulumi.get(self, "migration_state")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'DatabaseLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="sourceDatabaseName")
def source_database_name(self) -> str:
"""
Name of the database
"""
return pulumi.get(self, "source_database_name")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Database migration start time
"""
return pulumi.get(self, "started_on")
@pulumi.output_type
class MigrateSqlServerSqlMISyncTaskOutputErrorResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "resultType":
suggest = "result_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlMISyncTaskOutputErrorResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlMISyncTaskOutputErrorResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlMISyncTaskOutputErrorResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
error: 'outputs.ReportableExceptionResponse',
id: str,
result_type: str):
"""
:param 'ReportableExceptionResponse' error: Migration error
:param str id: Result identifier
:param str result_type: Result type
Expected value is 'ErrorOutput'.
"""
pulumi.set(__self__, "error", error)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "result_type", 'ErrorOutput')
@property
@pulumi.getter
def error(self) -> 'outputs.ReportableExceptionResponse':
"""
Migration error
"""
return pulumi.get(self, "error")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'ErrorOutput'.
"""
return pulumi.get(self, "result_type")
@pulumi.output_type
class MigrateSqlServerSqlMISyncTaskOutputMigrationLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "databaseCount":
suggest = "database_count"
elif key == "databaseErrorCount":
suggest = "database_error_count"
elif key == "endedOn":
suggest = "ended_on"
elif key == "resultType":
suggest = "result_type"
elif key == "sourceServerBrandVersion":
suggest = "source_server_brand_version"
elif key == "sourceServerName":
suggest = "source_server_name"
elif key == "sourceServerVersion":
suggest = "source_server_version"
elif key == "startedOn":
suggest = "started_on"
elif key == "targetServerBrandVersion":
suggest = "target_server_brand_version"
elif key == "targetServerName":
suggest = "target_server_name"
elif key == "targetServerVersion":
suggest = "target_server_version"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlMISyncTaskOutputMigrationLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlMISyncTaskOutputMigrationLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlMISyncTaskOutputMigrationLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
database_count: int,
database_error_count: int,
ended_on: str,
id: str,
result_type: str,
source_server_brand_version: str,
source_server_name: str,
source_server_version: str,
started_on: str,
state: str,
target_server_brand_version: str,
target_server_name: str,
target_server_version: str):
"""
:param int database_count: Count of databases
:param int database_error_count: Number of database level errors
:param str ended_on: Migration end time
:param str id: Result identifier
:param str result_type: Result type
Expected value is 'MigrationLevelOutput'.
:param str source_server_brand_version: Source server brand version
:param str source_server_name: Source server name
:param str source_server_version: Source server version
:param str started_on: Migration start time
:param str state: Current state of migration
:param str target_server_brand_version: Target server brand version
:param str target_server_name: Target server name
:param str target_server_version: Target server version
"""
pulumi.set(__self__, "database_count", database_count)
pulumi.set(__self__, "database_error_count", database_error_count)
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "result_type", 'MigrationLevelOutput')
pulumi.set(__self__, "source_server_brand_version", source_server_brand_version)
pulumi.set(__self__, "source_server_name", source_server_name)
pulumi.set(__self__, "source_server_version", source_server_version)
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "target_server_brand_version", target_server_brand_version)
pulumi.set(__self__, "target_server_name", target_server_name)
pulumi.set(__self__, "target_server_version", target_server_version)
@property
@pulumi.getter(name="databaseCount")
def database_count(self) -> int:
"""
Count of databases
"""
return pulumi.get(self, "database_count")
@property
@pulumi.getter(name="databaseErrorCount")
def database_error_count(self) -> int:
"""
Number of database level errors
"""
return pulumi.get(self, "database_error_count")
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'MigrationLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="sourceServerBrandVersion")
def source_server_brand_version(self) -> str:
"""
Source server brand version
"""
return pulumi.get(self, "source_server_brand_version")
@property
@pulumi.getter(name="sourceServerName")
def source_server_name(self) -> str:
"""
Source server name
"""
return pulumi.get(self, "source_server_name")
@property
@pulumi.getter(name="sourceServerVersion")
def source_server_version(self) -> str:
"""
Source server version
"""
return pulumi.get(self, "source_server_version")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter
def state(self) -> str:
"""
Current state of migration
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="targetServerBrandVersion")
def target_server_brand_version(self) -> str:
"""
Target server brand version
"""
return pulumi.get(self, "target_server_brand_version")
@property
@pulumi.getter(name="targetServerName")
def target_server_name(self) -> str:
"""
Target server name
"""
return pulumi.get(self, "target_server_name")
@property
@pulumi.getter(name="targetServerVersion")
def target_server_version(self) -> str:
"""
Target server version
"""
return pulumi.get(self, "target_server_version")
@pulumi.output_type
class MigrateSqlServerSqlMISyncTaskPropertiesResponse(dict):
"""
Properties for task that migrates SQL Server databases to Azure SQL Database Managed Instance sync scenario
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlMISyncTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlMISyncTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlMISyncTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence[Any],
state: str,
task_type: str,
input: Optional['outputs.MigrateSqlServerSqlMISyncTaskInputResponse'] = None):
"""
Properties for task that migrates SQL Server databases to Azure SQL Database Managed Instance sync scenario
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence[Union['MigrateSqlServerSqlMISyncTaskOutputDatabaseLevelResponse', 'MigrateSqlServerSqlMISyncTaskOutputErrorResponse', 'MigrateSqlServerSqlMISyncTaskOutputMigrationLevelResponse']] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'Migrate.SqlServer.AzureSqlDbMI.Sync.LRS'.
:param 'MigrateSqlServerSqlMISyncTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'Migrate.SqlServer.AzureSqlDbMI.Sync.LRS')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence[Any]:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'Migrate.SqlServer.AzureSqlDbMI.Sync.LRS'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.MigrateSqlServerSqlMISyncTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class MigrateSqlServerSqlMITaskInputResponse(dict):
"""
Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "backupBlobShare":
suggest = "backup_blob_share"
elif key == "selectedDatabases":
suggest = "selected_databases"
elif key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
elif key == "backupFileShare":
suggest = "backup_file_share"
elif key == "backupMode":
suggest = "backup_mode"
elif key == "selectedAgentJobs":
suggest = "selected_agent_jobs"
elif key == "selectedLogins":
suggest = "selected_logins"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlMITaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlMITaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlMITaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
backup_blob_share: 'outputs.BlobShareResponse',
selected_databases: Sequence['outputs.MigrateSqlServerSqlMIDatabaseInputResponse'],
source_connection_info: 'outputs.SqlConnectionInfoResponse',
target_connection_info: 'outputs.SqlConnectionInfoResponse',
backup_file_share: Optional['outputs.FileShareResponse'] = None,
backup_mode: Optional[str] = None,
selected_agent_jobs: Optional[Sequence[str]] = None,
selected_logins: Optional[Sequence[str]] = None):
"""
Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance.
:param 'BlobShareResponse' backup_blob_share: SAS URI of Azure Storage Account Container to be used for storing backup files.
:param Sequence['MigrateSqlServerSqlMIDatabaseInputResponse'] selected_databases: Databases to migrate
:param 'SqlConnectionInfoResponse' source_connection_info: Information for connecting to source
:param 'SqlConnectionInfoResponse' target_connection_info: Information for connecting to target
:param 'FileShareResponse' backup_file_share: Backup file share information for all selected databases.
        :param str backup_mode: Backup mode specifying whether to use an existing backup or create a new one. If using existing backups, backup file paths must be provided in selectedDatabases.
:param Sequence[str] selected_agent_jobs: Agent Jobs to migrate.
:param Sequence[str] selected_logins: Logins to migrate.
"""
pulumi.set(__self__, "backup_blob_share", backup_blob_share)
pulumi.set(__self__, "selected_databases", selected_databases)
pulumi.set(__self__, "source_connection_info", source_connection_info)
pulumi.set(__self__, "target_connection_info", target_connection_info)
if backup_file_share is not None:
pulumi.set(__self__, "backup_file_share", backup_file_share)
if backup_mode is not None:
pulumi.set(__self__, "backup_mode", backup_mode)
if selected_agent_jobs is not None:
pulumi.set(__self__, "selected_agent_jobs", selected_agent_jobs)
if selected_logins is not None:
pulumi.set(__self__, "selected_logins", selected_logins)
@property
@pulumi.getter(name="backupBlobShare")
def backup_blob_share(self) -> 'outputs.BlobShareResponse':
"""
SAS URI of Azure Storage Account Container to be used for storing backup files.
"""
return pulumi.get(self, "backup_blob_share")
@property
@pulumi.getter(name="selectedDatabases")
def selected_databases(self) -> Sequence['outputs.MigrateSqlServerSqlMIDatabaseInputResponse']:
"""
Databases to migrate
"""
return pulumi.get(self, "selected_databases")
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Information for connecting to source
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Information for connecting to target
"""
return pulumi.get(self, "target_connection_info")
@property
@pulumi.getter(name="backupFileShare")
def backup_file_share(self) -> Optional['outputs.FileShareResponse']:
"""
Backup file share information for all selected databases.
"""
return pulumi.get(self, "backup_file_share")
@property
@pulumi.getter(name="backupMode")
def backup_mode(self) -> Optional[str]:
"""
        Backup mode specifying whether to use an existing backup or create a new one. If using existing backups, backup file paths must be provided in selectedDatabases.
"""
return pulumi.get(self, "backup_mode")
@property
@pulumi.getter(name="selectedAgentJobs")
def selected_agent_jobs(self) -> Optional[Sequence[str]]:
"""
Agent Jobs to migrate.
"""
return pulumi.get(self, "selected_agent_jobs")
@property
@pulumi.getter(name="selectedLogins")
def selected_logins(self) -> Optional[Sequence[str]]:
"""
Logins to migrate.
"""
return pulumi.get(self, "selected_logins")
@pulumi.output_type
class MigrateSqlServerSqlMITaskOutputAgentJobLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "endedOn":
suggest = "ended_on"
elif key == "exceptionsAndWarnings":
suggest = "exceptions_and_warnings"
elif key == "isEnabled":
suggest = "is_enabled"
elif key == "resultType":
suggest = "result_type"
elif key == "startedOn":
suggest = "started_on"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlMITaskOutputAgentJobLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlMITaskOutputAgentJobLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlMITaskOutputAgentJobLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ended_on: str,
exceptions_and_warnings: Sequence['outputs.ReportableExceptionResponse'],
id: str,
is_enabled: bool,
message: str,
name: str,
result_type: str,
started_on: str,
state: str):
"""
:param str ended_on: Migration end time
:param Sequence['ReportableExceptionResponse'] exceptions_and_warnings: Migration errors and warnings per job
:param str id: Result identifier
:param bool is_enabled: The state of the original Agent Job.
:param str message: Migration progress message
:param str name: Agent Job name.
:param str result_type: Result type
Expected value is 'AgentJobLevelOutput'.
:param str started_on: Migration start time
:param str state: Current state of migration
"""
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "exceptions_and_warnings", exceptions_and_warnings)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "is_enabled", is_enabled)
pulumi.set(__self__, "message", message)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "result_type", 'AgentJobLevelOutput')
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "state", state)
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter(name="exceptionsAndWarnings")
def exceptions_and_warnings(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Migration errors and warnings per job
"""
return pulumi.get(self, "exceptions_and_warnings")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="isEnabled")
def is_enabled(self) -> bool:
"""
The state of the original Agent Job.
"""
return pulumi.get(self, "is_enabled")
@property
@pulumi.getter
def message(self) -> str:
"""
Migration progress message
"""
return pulumi.get(self, "message")
@property
@pulumi.getter
def name(self) -> str:
"""
Agent Job name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'AgentJobLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter
def state(self) -> str:
"""
Current state of migration
"""
return pulumi.get(self, "state")
@pulumi.output_type
class MigrateSqlServerSqlMITaskOutputDatabaseLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "databaseName":
suggest = "database_name"
elif key == "endedOn":
suggest = "ended_on"
elif key == "exceptionsAndWarnings":
suggest = "exceptions_and_warnings"
elif key == "resultType":
suggest = "result_type"
elif key == "sizeMB":
suggest = "size_mb"
elif key == "startedOn":
suggest = "started_on"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlMITaskOutputDatabaseLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlMITaskOutputDatabaseLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlMITaskOutputDatabaseLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
database_name: str,
ended_on: str,
exceptions_and_warnings: Sequence['outputs.ReportableExceptionResponse'],
id: str,
message: str,
result_type: str,
size_mb: float,
stage: str,
started_on: str,
state: str):
"""
:param str database_name: Name of the database
:param str ended_on: Migration end time
:param Sequence['ReportableExceptionResponse'] exceptions_and_warnings: Migration exceptions and warnings
:param str id: Result identifier
:param str message: Migration progress message
:param str result_type: Result type
Expected value is 'DatabaseLevelOutput'.
:param float size_mb: Size of the database in megabytes
:param str stage: Current stage of migration
:param str started_on: Migration start time
:param str state: Current state of migration
"""
pulumi.set(__self__, "database_name", database_name)
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "exceptions_and_warnings", exceptions_and_warnings)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "message", message)
pulumi.set(__self__, "result_type", 'DatabaseLevelOutput')
pulumi.set(__self__, "size_mb", size_mb)
pulumi.set(__self__, "stage", stage)
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "state", state)
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> str:
"""
Name of the database
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter(name="exceptionsAndWarnings")
def exceptions_and_warnings(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Migration exceptions and warnings
"""
return pulumi.get(self, "exceptions_and_warnings")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def message(self) -> str:
"""
Migration progress message
"""
return pulumi.get(self, "message")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'DatabaseLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="sizeMB")
def size_mb(self) -> float:
"""
Size of the database in megabytes
"""
return pulumi.get(self, "size_mb")
@property
@pulumi.getter
def stage(self) -> str:
"""
Current stage of migration
"""
return pulumi.get(self, "stage")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter
def state(self) -> str:
"""
Current state of migration
"""
return pulumi.get(self, "state")
@pulumi.output_type
class MigrateSqlServerSqlMITaskOutputErrorResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "resultType":
suggest = "result_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlMITaskOutputErrorResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlMITaskOutputErrorResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlMITaskOutputErrorResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
error: 'outputs.ReportableExceptionResponse',
id: str,
result_type: str):
"""
:param 'ReportableExceptionResponse' error: Migration error
:param str id: Result identifier
:param str result_type: Result type
Expected value is 'ErrorOutput'.
"""
pulumi.set(__self__, "error", error)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "result_type", 'ErrorOutput')
@property
@pulumi.getter
def error(self) -> 'outputs.ReportableExceptionResponse':
"""
Migration error
"""
return pulumi.get(self, "error")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'ErrorOutput'.
"""
return pulumi.get(self, "result_type")
@pulumi.output_type
class MigrateSqlServerSqlMITaskOutputLoginLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "endedOn":
suggest = "ended_on"
elif key == "exceptionsAndWarnings":
suggest = "exceptions_and_warnings"
elif key == "loginName":
suggest = "login_name"
elif key == "resultType":
suggest = "result_type"
elif key == "startedOn":
suggest = "started_on"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlMITaskOutputLoginLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlMITaskOutputLoginLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlMITaskOutputLoginLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ended_on: str,
exceptions_and_warnings: Sequence['outputs.ReportableExceptionResponse'],
id: str,
login_name: str,
message: str,
result_type: str,
stage: str,
started_on: str,
state: str):
"""
:param str ended_on: Login migration end time
:param Sequence['ReportableExceptionResponse'] exceptions_and_warnings: Login migration errors and warnings per login
:param str id: Result identifier
:param str login_name: Login name.
:param str message: Login migration progress message
:param str result_type: Result type
Expected value is 'LoginLevelOutput'.
:param str stage: Current stage of login
:param str started_on: Login migration start time
:param str state: Current state of login
"""
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "exceptions_and_warnings", exceptions_and_warnings)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "login_name", login_name)
pulumi.set(__self__, "message", message)
pulumi.set(__self__, "result_type", 'LoginLevelOutput')
pulumi.set(__self__, "stage", stage)
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "state", state)
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Login migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter(name="exceptionsAndWarnings")
def exceptions_and_warnings(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Login migration errors and warnings per login
"""
return pulumi.get(self, "exceptions_and_warnings")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="loginName")
def login_name(self) -> str:
"""
Login name.
"""
return pulumi.get(self, "login_name")
@property
@pulumi.getter
def message(self) -> str:
"""
Login migration progress message
"""
return pulumi.get(self, "message")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'LoginLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter
def stage(self) -> str:
"""
Current stage of login
"""
return pulumi.get(self, "stage")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Login migration start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter
def state(self) -> str:
"""
Current state of login
"""
return pulumi.get(self, "state")
@pulumi.output_type
class MigrateSqlServerSqlMITaskOutputMigrationLevelResponse(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "agentJobs":
suggest = "agent_jobs"
elif key == "endedOn":
suggest = "ended_on"
elif key == "exceptionsAndWarnings":
suggest = "exceptions_and_warnings"
elif key == "orphanedUsersInfo":
suggest = "orphaned_users_info"
elif key == "resultType":
suggest = "result_type"
elif key == "serverRoleResults":
suggest = "server_role_results"
elif key == "sourceServerBrandVersion":
suggest = "source_server_brand_version"
elif key == "sourceServerVersion":
suggest = "source_server_version"
elif key == "startedOn":
suggest = "started_on"
elif key == "targetServerBrandVersion":
suggest = "target_server_brand_version"
elif key == "targetServerVersion":
suggest = "target_server_version"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlMITaskOutputMigrationLevelResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlMITaskOutputMigrationLevelResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlMITaskOutputMigrationLevelResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
agent_jobs: Mapping[str, str],
databases: Mapping[str, str],
ended_on: str,
exceptions_and_warnings: Sequence['outputs.ReportableExceptionResponse'],
id: str,
logins: Mapping[str, str],
message: str,
orphaned_users_info: Sequence['outputs.OrphanedUserInfoResponse'],
result_type: str,
server_role_results: Mapping[str, 'outputs.StartMigrationScenarioServerRoleResultResponse'],
source_server_brand_version: str,
source_server_version: str,
started_on: str,
state: str,
status: str,
target_server_brand_version: str,
target_server_version: str):
"""
:param Mapping[str, str] agent_jobs: Selected agent jobs as a map from name to id
:param Mapping[str, str] databases: Selected databases as a map from database name to database id
:param str ended_on: Migration end time
:param Sequence['ReportableExceptionResponse'] exceptions_and_warnings: Migration exceptions and warnings.
:param str id: Result identifier
:param Mapping[str, str] logins: Selected logins as a map from name to id
:param str message: Migration progress message
:param Sequence['OrphanedUserInfoResponse'] orphaned_users_info: List of orphaned users.
:param str result_type: Result type
Expected value is 'MigrationLevelOutput'.
:param Mapping[str, 'StartMigrationScenarioServerRoleResultResponse'] server_role_results: Map of server role migration results.
:param str source_server_brand_version: Source server brand version
:param str source_server_version: Source server version
:param str started_on: Migration start time
:param str state: Current state of migration
:param str status: Current status of migration
:param str target_server_brand_version: Target server brand version
:param str target_server_version: Target server version
"""
pulumi.set(__self__, "agent_jobs", agent_jobs)
pulumi.set(__self__, "databases", databases)
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "exceptions_and_warnings", exceptions_and_warnings)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "logins", logins)
pulumi.set(__self__, "message", message)
pulumi.set(__self__, "orphaned_users_info", orphaned_users_info)
pulumi.set(__self__, "result_type", 'MigrationLevelOutput')
pulumi.set(__self__, "server_role_results", server_role_results)
pulumi.set(__self__, "source_server_brand_version", source_server_brand_version)
pulumi.set(__self__, "source_server_version", source_server_version)
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "status", status)
pulumi.set(__self__, "target_server_brand_version", target_server_brand_version)
pulumi.set(__self__, "target_server_version", target_server_version)
@property
@pulumi.getter(name="agentJobs")
def agent_jobs(self) -> Mapping[str, str]:
"""
Selected agent jobs as a map from name to id
"""
return pulumi.get(self, "agent_jobs")
@property
@pulumi.getter
def databases(self) -> Mapping[str, str]:
"""
Selected databases as a map from database name to database id
"""
return pulumi.get(self, "databases")
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Migration end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter(name="exceptionsAndWarnings")
def exceptions_and_warnings(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Migration exceptions and warnings.
"""
return pulumi.get(self, "exceptions_and_warnings")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def logins(self) -> Mapping[str, str]:
"""
Selected logins as a map from name to id
"""
return pulumi.get(self, "logins")
@property
@pulumi.getter
def message(self) -> str:
"""
Migration progress message
"""
return pulumi.get(self, "message")
@property
@pulumi.getter(name="orphanedUsersInfo")
def orphaned_users_info(self) -> Sequence['outputs.OrphanedUserInfoResponse']:
"""
List of orphaned users.
"""
return pulumi.get(self, "orphaned_users_info")
@property
@pulumi.getter(name="resultType")
def result_type(self) -> str:
"""
Result type
Expected value is 'MigrationLevelOutput'.
"""
return pulumi.get(self, "result_type")
@property
@pulumi.getter(name="serverRoleResults")
def server_role_results(self) -> Mapping[str, 'outputs.StartMigrationScenarioServerRoleResultResponse']:
"""
Map of server role migration results.
"""
return pulumi.get(self, "server_role_results")
@property
@pulumi.getter(name="sourceServerBrandVersion")
def source_server_brand_version(self) -> str:
"""
Source server brand version
"""
return pulumi.get(self, "source_server_brand_version")
@property
@pulumi.getter(name="sourceServerVersion")
def source_server_version(self) -> str:
"""
Source server version
"""
return pulumi.get(self, "source_server_version")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Migration start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter
def state(self) -> str:
"""
Current state of migration
"""
return pulumi.get(self, "state")
@property
@pulumi.getter
def status(self) -> str:
"""
Current status of migration
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="targetServerBrandVersion")
def target_server_brand_version(self) -> str:
"""
Target server brand version
"""
return pulumi.get(self, "target_server_brand_version")
@property
@pulumi.getter(name="targetServerVersion")
def target_server_version(self) -> str:
"""
Target server version
"""
return pulumi.get(self, "target_server_version")
@pulumi.output_type
class MigrateSqlServerSqlMITaskPropertiesResponse(dict):
"""
Properties for task that migrates SQL Server databases to Azure SQL Database Managed Instance
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSqlServerSqlMITaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSqlServerSqlMITaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSqlServerSqlMITaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence[Any],
state: str,
task_type: str,
input: Optional['outputs.MigrateSqlServerSqlMITaskInputResponse'] = None):
"""
Properties for task that migrates SQL Server databases to Azure SQL Database Managed Instance
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence[Union['MigrateSqlServerSqlMITaskOutputAgentJobLevelResponse', 'MigrateSqlServerSqlMITaskOutputDatabaseLevelResponse', 'MigrateSqlServerSqlMITaskOutputErrorResponse', 'MigrateSqlServerSqlMITaskOutputLoginLevelResponse', 'MigrateSqlServerSqlMITaskOutputMigrationLevelResponse']] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'Migrate.SqlServer.AzureSqlDbMI'.
:param 'MigrateSqlServerSqlMITaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'Migrate.SqlServer.AzureSqlDbMI')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence[Any]:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'Migrate.SqlServer.AzureSqlDbMI'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.MigrateSqlServerSqlMITaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class MigrateSyncCompleteCommandInputResponse(dict):
"""
Input for command that completes sync migration for a database.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "databaseName":
suggest = "database_name"
elif key == "commitTimeStamp":
suggest = "commit_time_stamp"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSyncCompleteCommandInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSyncCompleteCommandInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSyncCompleteCommandInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
database_name: str,
commit_time_stamp: Optional[str] = None):
"""
Input for command that completes sync migration for a database.
:param str database_name: Name of database
:param str commit_time_stamp: Time stamp to complete
"""
pulumi.set(__self__, "database_name", database_name)
if commit_time_stamp is not None:
pulumi.set(__self__, "commit_time_stamp", commit_time_stamp)
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> str:
"""
Name of database
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter(name="commitTimeStamp")
def commit_time_stamp(self) -> Optional[str]:
"""
Time stamp to complete
"""
return pulumi.get(self, "commit_time_stamp")
@pulumi.output_type
class MigrateSyncCompleteCommandOutputResponse(dict):
"""
Output for command that completes sync migration for a database.
"""
def __init__(__self__, *,
errors: Sequence['outputs.ReportableExceptionResponse'],
id: str):
"""
Output for command that completes sync migration for a database.
:param Sequence['ReportableExceptionResponse'] errors: List of errors that happened during the command execution
:param str id: Result identifier
"""
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "id", id)
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
List of errors that happened during the command execution
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@pulumi.output_type
class MigrateSyncCompleteCommandPropertiesResponse(dict):
"""
Properties for the command that completes sync migration for a database.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "commandType":
suggest = "command_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrateSyncCompleteCommandPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrateSyncCompleteCommandPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrateSyncCompleteCommandPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
command_type: str,
errors: Sequence['outputs.ODataErrorResponse'],
output: 'outputs.MigrateSyncCompleteCommandOutputResponse',
state: str,
input: Optional['outputs.MigrateSyncCompleteCommandInputResponse'] = None):
"""
Properties for the command that completes sync migration for a database.
:param str command_type: Command type.
Expected value is 'Migrate.Sync.Complete.Database'.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param 'MigrateSyncCompleteCommandOutputResponse' output: Command output. This is ignored if submitted.
:param str state: The state of the command. This is ignored if submitted.
:param 'MigrateSyncCompleteCommandInputResponse' input: Command input
"""
pulumi.set(__self__, "command_type", 'Migrate.Sync.Complete.Database')
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter(name="commandType")
def command_type(self) -> str:
"""
Command type.
Expected value is 'Migrate.Sync.Complete.Database'.
"""
return pulumi.get(self, "command_type")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> 'outputs.MigrateSyncCompleteCommandOutputResponse':
"""
Command output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the command. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter
def input(self) -> Optional['outputs.MigrateSyncCompleteCommandInputResponse']:
"""
Command input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class MigrationEligibilityInfoResponse(dict):
"""
Information about migration eligibility of a server object
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "isEligibleForMigration":
suggest = "is_eligible_for_migration"
elif key == "validationMessages":
suggest = "validation_messages"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrationEligibilityInfoResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrationEligibilityInfoResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrationEligibilityInfoResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
is_eligible_for_migration: bool,
validation_messages: Sequence[str]):
"""
Information about migration eligibility of a server object
        :param bool is_eligible_for_migration: Whether the object is eligible for migration or not.
:param Sequence[str] validation_messages: Information about eligibility failure for the server object.
"""
pulumi.set(__self__, "is_eligible_for_migration", is_eligible_for_migration)
pulumi.set(__self__, "validation_messages", validation_messages)
@property
@pulumi.getter(name="isEligibleForMigration")
def is_eligible_for_migration(self) -> bool:
"""
        Whether the object is eligible for migration or not.
"""
return pulumi.get(self, "is_eligible_for_migration")
@property
@pulumi.getter(name="validationMessages")
def validation_messages(self) -> Sequence[str]:
"""
Information about eligibility failure for the server object.
"""
return pulumi.get(self, "validation_messages")
@pulumi.output_type
class MigrationReportResultResponse(dict):
"""
    Migration validation report result; contains the URL for downloading the generated report.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "reportUrl":
suggest = "report_url"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrationReportResultResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrationReportResultResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrationReportResultResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
id: str,
report_url: str):
"""
        Migration validation report result; contains the URL for downloading the generated report.
        :param str id: Migration validation result identifier
        :param str report_url: The URL of the report.
"""
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "report_url", report_url)
@property
@pulumi.getter
def id(self) -> str:
"""
Migration validation result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="reportUrl")
def report_url(self) -> str:
"""
        The URL of the report.
"""
return pulumi.get(self, "report_url")
@pulumi.output_type
class MigrationValidationDatabaseSummaryResultResponse(dict):
"""
Migration Validation Database level summary result
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "endedOn":
suggest = "ended_on"
elif key == "migrationId":
suggest = "migration_id"
elif key == "sourceDatabaseName":
suggest = "source_database_name"
elif key == "startedOn":
suggest = "started_on"
elif key == "targetDatabaseName":
suggest = "target_database_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrationValidationDatabaseSummaryResultResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrationValidationDatabaseSummaryResultResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrationValidationDatabaseSummaryResultResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ended_on: str,
id: str,
migration_id: str,
source_database_name: str,
started_on: str,
status: str,
target_database_name: str):
"""
Migration Validation Database level summary result
:param str ended_on: Validation end time
:param str id: Result identifier
:param str migration_id: Migration Identifier
:param str source_database_name: Name of the source database
:param str started_on: Validation start time
:param str status: Current status of validation at the database level
:param str target_database_name: Name of the target database
"""
pulumi.set(__self__, "ended_on", ended_on)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "migration_id", migration_id)
pulumi.set(__self__, "source_database_name", source_database_name)
pulumi.set(__self__, "started_on", started_on)
pulumi.set(__self__, "status", status)
pulumi.set(__self__, "target_database_name", target_database_name)
@property
@pulumi.getter(name="endedOn")
def ended_on(self) -> str:
"""
Validation end time
"""
return pulumi.get(self, "ended_on")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="migrationId")
def migration_id(self) -> str:
"""
Migration Identifier
"""
return pulumi.get(self, "migration_id")
@property
@pulumi.getter(name="sourceDatabaseName")
def source_database_name(self) -> str:
"""
Name of the source database
"""
return pulumi.get(self, "source_database_name")
@property
@pulumi.getter(name="startedOn")
def started_on(self) -> str:
"""
Validation start time
"""
return pulumi.get(self, "started_on")
@property
@pulumi.getter
def status(self) -> str:
"""
Current status of validation at the database level
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="targetDatabaseName")
def target_database_name(self) -> str:
"""
Name of the target database
"""
return pulumi.get(self, "target_database_name")
@pulumi.output_type
class MigrationValidationOptionsResponse(dict):
"""
Types of validations to run after the migration
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "enableDataIntegrityValidation":
suggest = "enable_data_integrity_validation"
elif key == "enableQueryAnalysisValidation":
suggest = "enable_query_analysis_validation"
elif key == "enableSchemaValidation":
suggest = "enable_schema_validation"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MigrationValidationOptionsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MigrationValidationOptionsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MigrationValidationOptionsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
enable_data_integrity_validation: Optional[bool] = None,
enable_query_analysis_validation: Optional[bool] = None,
enable_schema_validation: Optional[bool] = None):
"""
Types of validations to run after the migration
        :param bool enable_data_integrity_validation: Allows performing a checksum-based data integrity validation between source and target for the selected database/tables.
        :param bool enable_query_analysis_validation: Allows performing a quick and intelligent query analysis by retrieving queries from the source database and executing them in the target. The result will have execution statistics for executions in the source and target databases for the extracted queries.
        :param bool enable_schema_validation: Allows comparing the schema information between source and target.
"""
if enable_data_integrity_validation is not None:
pulumi.set(__self__, "enable_data_integrity_validation", enable_data_integrity_validation)
if enable_query_analysis_validation is not None:
pulumi.set(__self__, "enable_query_analysis_validation", enable_query_analysis_validation)
if enable_schema_validation is not None:
pulumi.set(__self__, "enable_schema_validation", enable_schema_validation)
@property
@pulumi.getter(name="enableDataIntegrityValidation")
def enable_data_integrity_validation(self) -> Optional[bool]:
"""
        Allows performing a checksum-based data integrity validation between source and target for the selected database/tables.
"""
return pulumi.get(self, "enable_data_integrity_validation")
@property
@pulumi.getter(name="enableQueryAnalysisValidation")
def enable_query_analysis_validation(self) -> Optional[bool]:
"""
        Allows performing a quick and intelligent query analysis by retrieving queries from the source database and executing them in the target. The result will have execution statistics for executions in the source and target databases for the extracted queries.
"""
return pulumi.get(self, "enable_query_analysis_validation")
@property
@pulumi.getter(name="enableSchemaValidation")
def enable_schema_validation(self) -> Optional[bool]:
"""
        Allows comparing the schema information between source and target.
"""
return pulumi.get(self, "enable_schema_validation")
@pulumi.output_type
class MySqlConnectionInfoResponse(dict):
"""
Information for connecting to MySQL server
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "serverName":
suggest = "server_name"
elif key == "userName":
suggest = "user_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in MySqlConnectionInfoResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
MySqlConnectionInfoResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
MySqlConnectionInfoResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
port: int,
server_name: str,
type: str,
password: Optional[str] = None,
user_name: Optional[str] = None):
"""
Information for connecting to MySQL server
:param int port: Port for Server
:param str server_name: Name of the server
:param str type: Type of connection info
Expected value is 'MySqlConnectionInfo'.
:param str password: Password credential.
:param str user_name: User name
"""
pulumi.set(__self__, "port", port)
pulumi.set(__self__, "server_name", server_name)
pulumi.set(__self__, "type", 'MySqlConnectionInfo')
if password is not None:
pulumi.set(__self__, "password", password)
if user_name is not None:
pulumi.set(__self__, "user_name", user_name)
@property
@pulumi.getter
def port(self) -> int:
"""
Port for Server
"""
return pulumi.get(self, "port")
@property
@pulumi.getter(name="serverName")
def server_name(self) -> str:
"""
Name of the server
"""
return pulumi.get(self, "server_name")
@property
@pulumi.getter
def type(self) -> str:
"""
Type of connection info
Expected value is 'MySqlConnectionInfo'.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def password(self) -> Optional[str]:
"""
Password credential.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter(name="userName")
def user_name(self) -> Optional[str]:
"""
User name
"""
return pulumi.get(self, "user_name")
@pulumi.output_type
class ODataErrorResponse(dict):
"""
Error information in OData format.
"""
def __init__(__self__, *,
code: str,
details: Sequence['outputs.ODataErrorResponse'],
message: str):
"""
Error information in OData format.
:param str code: The machine-readable description of the error, such as 'InvalidRequest' or 'InternalServerError'
:param Sequence['ODataErrorResponse'] details: Inner errors that caused this error
:param str message: The human-readable description of the error
"""
pulumi.set(__self__, "code", code)
pulumi.set(__self__, "details", details)
pulumi.set(__self__, "message", message)
@property
@pulumi.getter
def code(self) -> str:
"""
The machine-readable description of the error, such as 'InvalidRequest' or 'InternalServerError'
"""
return pulumi.get(self, "code")
@property
@pulumi.getter
def details(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Inner errors that caused this error
"""
return pulumi.get(self, "details")
@property
@pulumi.getter
def message(self) -> str:
"""
The human-readable description of the error
"""
return pulumi.get(self, "message")
@pulumi.output_type
class OrphanedUserInfoResponse(dict):
"""
Information of orphaned users on the SQL server database.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "databaseName":
suggest = "database_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in OrphanedUserInfoResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
OrphanedUserInfoResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
OrphanedUserInfoResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
database_name: Optional[str] = None,
name: Optional[str] = None):
"""
Information of orphaned users on the SQL server database.
:param str database_name: Parent database of the user
:param str name: Name of the orphaned user
"""
if database_name is not None:
pulumi.set(__self__, "database_name", database_name)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> Optional[str]:
"""
Parent database of the user
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of the orphaned user
"""
return pulumi.get(self, "name")
@pulumi.output_type
class PostgreSqlConnectionInfoResponse(dict):
"""
Information for connecting to PostgreSQL server
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "serverName":
suggest = "server_name"
elif key == "databaseName":
suggest = "database_name"
elif key == "userName":
suggest = "user_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in PostgreSqlConnectionInfoResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
PostgreSqlConnectionInfoResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
PostgreSqlConnectionInfoResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
port: int,
server_name: str,
type: str,
database_name: Optional[str] = None,
password: Optional[str] = None,
user_name: Optional[str] = None):
"""
Information for connecting to PostgreSQL server
:param int port: Port for Server
:param str server_name: Name of the server
:param str type: Type of connection info
Expected value is 'PostgreSqlConnectionInfo'.
:param str database_name: Name of the database
:param str password: Password credential.
:param str user_name: User name
"""
pulumi.set(__self__, "port", port)
pulumi.set(__self__, "server_name", server_name)
pulumi.set(__self__, "type", 'PostgreSqlConnectionInfo')
if database_name is not None:
pulumi.set(__self__, "database_name", database_name)
if password is not None:
pulumi.set(__self__, "password", password)
if user_name is not None:
pulumi.set(__self__, "user_name", user_name)
@property
@pulumi.getter
def port(self) -> int:
"""
Port for Server
"""
return pulumi.get(self, "port")
@property
@pulumi.getter(name="serverName")
def server_name(self) -> str:
"""
Name of the server
"""
return pulumi.get(self, "server_name")
@property
@pulumi.getter
def type(self) -> str:
"""
Type of connection info
Expected value is 'PostgreSqlConnectionInfo'.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="databaseName")
def database_name(self) -> Optional[str]:
"""
Name of the database
"""
return pulumi.get(self, "database_name")
@property
@pulumi.getter
def password(self) -> Optional[str]:
"""
Password credential.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter(name="userName")
def user_name(self) -> Optional[str]:
"""
User name
"""
return pulumi.get(self, "user_name")
@pulumi.output_type
class ProjectFilePropertiesResponse(dict):
"""
Base class for file properties.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "lastModified":
suggest = "last_modified"
elif key == "filePath":
suggest = "file_path"
elif key == "mediaType":
suggest = "media_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ProjectFilePropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ProjectFilePropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ProjectFilePropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
last_modified: str,
size: float,
extension: Optional[str] = None,
file_path: Optional[str] = None,
media_type: Optional[str] = None):
"""
Base class for file properties.
:param str last_modified: Modification DateTime.
:param float size: File size.
        :param str extension: Optional file extension. If submitted, it should not have a leading period and must match the extension from filePath.
:param str file_path: Relative path of this file resource. This property can be set when creating or updating the file resource.
:param str media_type: File content type. This property can be modified to reflect the file content type.
"""
pulumi.set(__self__, "last_modified", last_modified)
pulumi.set(__self__, "size", size)
if extension is not None:
pulumi.set(__self__, "extension", extension)
if file_path is not None:
pulumi.set(__self__, "file_path", file_path)
if media_type is not None:
pulumi.set(__self__, "media_type", media_type)
@property
@pulumi.getter(name="lastModified")
def last_modified(self) -> str:
"""
Modification DateTime.
"""
return pulumi.get(self, "last_modified")
@property
@pulumi.getter
def size(self) -> float:
"""
File size.
"""
return pulumi.get(self, "size")
@property
@pulumi.getter
def extension(self) -> Optional[str]:
"""
        Optional file extension. If submitted, it should not have a leading period and must match the extension from filePath.
"""
return pulumi.get(self, "extension")
@property
@pulumi.getter(name="filePath")
def file_path(self) -> Optional[str]:
"""
Relative path of this file resource. This property can be set when creating or updating the file resource.
"""
return pulumi.get(self, "file_path")
@property
@pulumi.getter(name="mediaType")
def media_type(self) -> Optional[str]:
"""
File content type. This property can be modified to reflect the file content type.
"""
return pulumi.get(self, "media_type")
@pulumi.output_type
class QueryAnalysisValidationResultResponse(dict):
"""
Results for query analysis comparison between the source and target
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "queryResults":
suggest = "query_results"
elif key == "validationErrors":
suggest = "validation_errors"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in QueryAnalysisValidationResultResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
QueryAnalysisValidationResultResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
QueryAnalysisValidationResultResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
query_results: 'outputs.QueryExecutionResultResponse',
validation_errors: 'outputs.ValidationErrorResponse'):
"""
Results for query analysis comparison between the source and target
        :param 'QueryExecutionResultResponse' query_results: List of queries executed and their execution results in source and target
:param 'ValidationErrorResponse' validation_errors: Errors that are part of the execution
"""
pulumi.set(__self__, "query_results", query_results)
pulumi.set(__self__, "validation_errors", validation_errors)
@property
@pulumi.getter(name="queryResults")
def query_results(self) -> 'outputs.QueryExecutionResultResponse':
"""
        List of queries executed and their execution results in source and target
"""
return pulumi.get(self, "query_results")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> 'outputs.ValidationErrorResponse':
"""
Errors that are part of the execution
"""
return pulumi.get(self, "validation_errors")
@pulumi.output_type
class QueryExecutionResultResponse(dict):
"""
Describes query analysis results for execution in source and target
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "queryText":
suggest = "query_text"
elif key == "sourceResult":
suggest = "source_result"
elif key == "statementsInBatch":
suggest = "statements_in_batch"
elif key == "targetResult":
suggest = "target_result"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in QueryExecutionResultResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
QueryExecutionResultResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
QueryExecutionResultResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
query_text: str,
source_result: 'outputs.ExecutionStatisticsResponse',
statements_in_batch: float,
target_result: 'outputs.ExecutionStatisticsResponse'):
"""
Describes query analysis results for execution in source and target
:param str query_text: Query text retrieved from the source server
:param 'ExecutionStatisticsResponse' source_result: Query analysis result from the source
        :param float statements_in_batch: Total number of statements in the batch
:param 'ExecutionStatisticsResponse' target_result: Query analysis result from the target
"""
pulumi.set(__self__, "query_text", query_text)
pulumi.set(__self__, "source_result", source_result)
pulumi.set(__self__, "statements_in_batch", statements_in_batch)
pulumi.set(__self__, "target_result", target_result)
@property
@pulumi.getter(name="queryText")
def query_text(self) -> str:
"""
Query text retrieved from the source server
"""
return pulumi.get(self, "query_text")
@property
@pulumi.getter(name="sourceResult")
def source_result(self) -> 'outputs.ExecutionStatisticsResponse':
"""
Query analysis result from the source
"""
return pulumi.get(self, "source_result")
@property
@pulumi.getter(name="statementsInBatch")
def statements_in_batch(self) -> float:
"""
Total no. of statements in the batch
"""
return pulumi.get(self, "statements_in_batch")
@property
@pulumi.getter(name="targetResult")
def target_result(self) -> 'outputs.ExecutionStatisticsResponse':
"""
Query analysis result from the target
"""
return pulumi.get(self, "target_result")
@pulumi.output_type
class ReportableExceptionResponse(dict):
"""
Exception object for all custom exceptions
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "filePath":
suggest = "file_path"
elif key == "hResult":
suggest = "h_result"
elif key == "lineNumber":
suggest = "line_number"
elif key == "stackTrace":
suggest = "stack_trace"
elif key == "actionableMessage":
suggest = "actionable_message"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ReportableExceptionResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ReportableExceptionResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ReportableExceptionResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
file_path: str,
h_result: int,
line_number: str,
message: str,
stack_trace: str,
actionable_message: Optional[str] = None):
"""
Exception object for all custom exceptions
        :param str file_path: The path to the file where the exception occurred
        :param int h_result: Coded numerical value that is assigned to a specific exception
        :param str line_number: The line number where the exception occurred
:param str message: Error message
:param str stack_trace: Stack trace
:param str actionable_message: Actionable steps for this exception
"""
pulumi.set(__self__, "file_path", file_path)
pulumi.set(__self__, "h_result", h_result)
pulumi.set(__self__, "line_number", line_number)
pulumi.set(__self__, "message", message)
pulumi.set(__self__, "stack_trace", stack_trace)
if actionable_message is not None:
pulumi.set(__self__, "actionable_message", actionable_message)
@property
@pulumi.getter(name="filePath")
def file_path(self) -> str:
"""
        The path to the file where the exception occurred
"""
return pulumi.get(self, "file_path")
@property
@pulumi.getter(name="hResult")
def h_result(self) -> int:
"""
Coded numerical value that is assigned to a specific exception
"""
return pulumi.get(self, "h_result")
@property
@pulumi.getter(name="lineNumber")
def line_number(self) -> str:
"""
        The line number where the exception occurred
"""
return pulumi.get(self, "line_number")
@property
@pulumi.getter
def message(self) -> str:
"""
Error message
"""
return pulumi.get(self, "message")
@property
@pulumi.getter(name="stackTrace")
def stack_trace(self) -> str:
"""
Stack trace
"""
return pulumi.get(self, "stack_trace")
@property
@pulumi.getter(name="actionableMessage")
def actionable_message(self) -> Optional[str]:
"""
Actionable steps for this exception
"""
return pulumi.get(self, "actionable_message")
@pulumi.output_type
class SchemaComparisonValidationResultResponse(dict):
"""
Results for schema comparison between the source and target
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "schemaDifferences":
suggest = "schema_differences"
elif key == "validationErrors":
suggest = "validation_errors"
elif key == "sourceDatabaseObjectCount":
suggest = "source_database_object_count"
elif key == "targetDatabaseObjectCount":
suggest = "target_database_object_count"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SchemaComparisonValidationResultResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SchemaComparisonValidationResultResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SchemaComparisonValidationResultResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
schema_differences: 'outputs.SchemaComparisonValidationResultTypeResponse',
validation_errors: 'outputs.ValidationErrorResponse',
source_database_object_count: Optional[Mapping[str, float]] = None,
target_database_object_count: Optional[Mapping[str, float]] = None):
"""
Results for schema comparison between the source and target
:param 'SchemaComparisonValidationResultTypeResponse' schema_differences: List of schema differences between the source and target databases
:param 'ValidationErrorResponse' validation_errors: List of errors that happened while performing schema compare validation
:param Mapping[str, float] source_database_object_count: Count of source database objects
:param Mapping[str, float] target_database_object_count: Count of target database objects
"""
pulumi.set(__self__, "schema_differences", schema_differences)
pulumi.set(__self__, "validation_errors", validation_errors)
if source_database_object_count is not None:
pulumi.set(__self__, "source_database_object_count", source_database_object_count)
if target_database_object_count is not None:
pulumi.set(__self__, "target_database_object_count", target_database_object_count)
@property
@pulumi.getter(name="schemaDifferences")
def schema_differences(self) -> 'outputs.SchemaComparisonValidationResultTypeResponse':
"""
List of schema differences between the source and target databases
"""
return pulumi.get(self, "schema_differences")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> 'outputs.ValidationErrorResponse':
"""
List of errors that happened while performing schema compare validation
"""
return pulumi.get(self, "validation_errors")
@property
@pulumi.getter(name="sourceDatabaseObjectCount")
def source_database_object_count(self) -> Optional[Mapping[str, float]]:
"""
Count of source database objects
"""
return pulumi.get(self, "source_database_object_count")
@property
@pulumi.getter(name="targetDatabaseObjectCount")
def target_database_object_count(self) -> Optional[Mapping[str, float]]:
"""
Count of target database objects
"""
return pulumi.get(self, "target_database_object_count")
@pulumi.output_type
class SchemaComparisonValidationResultTypeResponse(dict):
"""
    Description of the errors that occur while performing migration validation
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "objectName":
suggest = "object_name"
elif key == "objectType":
suggest = "object_type"
elif key == "updateAction":
suggest = "update_action"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SchemaComparisonValidationResultTypeResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SchemaComparisonValidationResultTypeResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SchemaComparisonValidationResultTypeResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
object_name: str,
object_type: str,
update_action: str):
"""
        Description of the errors that occur while performing migration validation
        :param str object_name: Name of the object that has the difference
        :param str object_type: Type of the object that has the difference, e.g. Table, View, or StoredProcedure
:param str update_action: Update action type with respect to target
"""
pulumi.set(__self__, "object_name", object_name)
pulumi.set(__self__, "object_type", object_type)
pulumi.set(__self__, "update_action", update_action)
@property
@pulumi.getter(name="objectName")
def object_name(self) -> str:
"""
Name of the object that has the difference
"""
return pulumi.get(self, "object_name")
@property
@pulumi.getter(name="objectType")
def object_type(self) -> str:
"""
        Type of the object that has the difference, e.g. Table, View, or StoredProcedure
"""
return pulumi.get(self, "object_type")
@property
@pulumi.getter(name="updateAction")
def update_action(self) -> str:
"""
Update action type with respect to target
"""
return pulumi.get(self, "update_action")
@pulumi.output_type
class SelectedCertificateInputResponse(dict):
"""
Info for certificate to be exported for TDE enabled databases.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "certificateName":
suggest = "certificate_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SelectedCertificateInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SelectedCertificateInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SelectedCertificateInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
certificate_name: str,
password: str):
"""
Info for certificate to be exported for TDE enabled databases.
:param str certificate_name: Name of certificate to be exported.
:param str password: Password to use for encrypting the exported certificate.
"""
pulumi.set(__self__, "certificate_name", certificate_name)
pulumi.set(__self__, "password", password)
@property
@pulumi.getter(name="certificateName")
def certificate_name(self) -> str:
"""
Name of certificate to be exported.
"""
return pulumi.get(self, "certificate_name")
@property
@pulumi.getter
def password(self) -> str:
"""
Password to use for encrypting the exported certificate.
"""
return pulumi.get(self, "password")
@pulumi.output_type
class ServiceSkuResponse(dict):
"""
An Azure SKU instance
"""
def __init__(__self__, *,
capacity: Optional[int] = None,
family: Optional[str] = None,
name: Optional[str] = None,
size: Optional[str] = None,
tier: Optional[str] = None):
"""
An Azure SKU instance
:param int capacity: The capacity of the SKU, if it supports scaling
:param str family: The SKU family, used when the service has multiple performance classes within a tier, such as 'A', 'D', etc. for virtual machines
:param str name: The unique name of the SKU, such as 'P3'
:param str size: The size of the SKU, used when the name alone does not denote a service size or when a SKU has multiple performance classes within a family, e.g. 'A1' for virtual machines
:param str tier: The tier of the SKU, such as 'Free', 'Basic', 'Standard', or 'Premium'
"""
if capacity is not None:
pulumi.set(__self__, "capacity", capacity)
if family is not None:
pulumi.set(__self__, "family", family)
if name is not None:
pulumi.set(__self__, "name", name)
if size is not None:
pulumi.set(__self__, "size", size)
if tier is not None:
pulumi.set(__self__, "tier", tier)
@property
@pulumi.getter
def capacity(self) -> Optional[int]:
"""
The capacity of the SKU, if it supports scaling
"""
return pulumi.get(self, "capacity")
@property
@pulumi.getter
def family(self) -> Optional[str]:
"""
The SKU family, used when the service has multiple performance classes within a tier, such as 'A', 'D', etc. for virtual machines
"""
return pulumi.get(self, "family")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The unique name of the SKU, such as 'P3'
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def size(self) -> Optional[str]:
"""
The size of the SKU, used when the name alone does not denote a service size or when a SKU has multiple performance classes within a family, e.g. 'A1' for virtual machines
"""
return pulumi.get(self, "size")
@property
@pulumi.getter
def tier(self) -> Optional[str]:
"""
The tier of the SKU, such as 'Free', 'Basic', 'Standard', or 'Premium'
"""
return pulumi.get(self, "tier")
@pulumi.output_type
class SqlConnectionInfoResponse(dict):
"""
Information for connecting to SQL database server
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "dataSource":
suggest = "data_source"
elif key == "additionalSettings":
suggest = "additional_settings"
elif key == "encryptConnection":
suggest = "encrypt_connection"
elif key == "trustServerCertificate":
suggest = "trust_server_certificate"
elif key == "userName":
suggest = "user_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SqlConnectionInfoResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SqlConnectionInfoResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SqlConnectionInfoResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
data_source: str,
type: str,
additional_settings: Optional[str] = None,
authentication: Optional[str] = None,
encrypt_connection: Optional[bool] = None,
password: Optional[str] = None,
platform: Optional[str] = None,
trust_server_certificate: Optional[bool] = None,
user_name: Optional[str] = None):
"""
Information for connecting to SQL database server
        :param str data_source: Data source in the format Protocol:MachineName\\SQLServerInstanceName,PortNumber
:param str type: Type of connection info
Expected value is 'SqlConnectionInfo'.
:param str additional_settings: Additional connection settings
:param str authentication: Authentication type to use for connection
:param bool encrypt_connection: Whether to encrypt the connection
:param str password: Password credential.
:param str platform: Server platform type for connection
:param bool trust_server_certificate: Whether to trust the server certificate
:param str user_name: User name
"""
pulumi.set(__self__, "data_source", data_source)
pulumi.set(__self__, "type", 'SqlConnectionInfo')
if additional_settings is not None:
pulumi.set(__self__, "additional_settings", additional_settings)
if authentication is not None:
pulumi.set(__self__, "authentication", authentication)
if encrypt_connection is None:
encrypt_connection = True
if encrypt_connection is not None:
pulumi.set(__self__, "encrypt_connection", encrypt_connection)
if password is not None:
pulumi.set(__self__, "password", password)
if platform is not None:
pulumi.set(__self__, "platform", platform)
if trust_server_certificate is None:
trust_server_certificate = False
if trust_server_certificate is not None:
pulumi.set(__self__, "trust_server_certificate", trust_server_certificate)
if user_name is not None:
pulumi.set(__self__, "user_name", user_name)
@property
@pulumi.getter(name="dataSource")
def data_source(self) -> str:
"""
        Data source in the format Protocol:MachineName\\SQLServerInstanceName,PortNumber
"""
return pulumi.get(self, "data_source")
@property
@pulumi.getter
def type(self) -> str:
"""
Type of connection info
Expected value is 'SqlConnectionInfo'.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="additionalSettings")
def additional_settings(self) -> Optional[str]:
"""
Additional connection settings
"""
return pulumi.get(self, "additional_settings")
@property
@pulumi.getter
def authentication(self) -> Optional[str]:
"""
Authentication type to use for connection
"""
return pulumi.get(self, "authentication")
@property
@pulumi.getter(name="encryptConnection")
def encrypt_connection(self) -> Optional[bool]:
"""
Whether to encrypt the connection
"""
return pulumi.get(self, "encrypt_connection")
@property
@pulumi.getter
def password(self) -> Optional[str]:
"""
Password credential.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter
def platform(self) -> Optional[str]:
"""
Server platform type for connection
"""
return pulumi.get(self, "platform")
@property
@pulumi.getter(name="trustServerCertificate")
def trust_server_certificate(self) -> Optional[bool]:
"""
Whether to trust the server certificate
"""
return pulumi.get(self, "trust_server_certificate")
@property
@pulumi.getter(name="userName")
def user_name(self) -> Optional[str]:
"""
User name
"""
return pulumi.get(self, "user_name")
@pulumi.output_type
class StartMigrationScenarioServerRoleResultResponse(dict):
"""
Server role migration result
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "exceptionsAndWarnings":
suggest = "exceptions_and_warnings"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in StartMigrationScenarioServerRoleResultResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
StartMigrationScenarioServerRoleResultResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
StartMigrationScenarioServerRoleResultResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
exceptions_and_warnings: Sequence['outputs.ReportableExceptionResponse'],
name: str,
state: str):
"""
Server role migration result
:param Sequence['ReportableExceptionResponse'] exceptions_and_warnings: Migration exceptions and warnings.
:param str name: Name of server role.
:param str state: Current state of migration
"""
pulumi.set(__self__, "exceptions_and_warnings", exceptions_and_warnings)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "state", state)
@property
@pulumi.getter(name="exceptionsAndWarnings")
def exceptions_and_warnings(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Migration exceptions and warnings.
"""
return pulumi.get(self, "exceptions_and_warnings")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of server role.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def state(self) -> str:
"""
Current state of migration
"""
return pulumi.get(self, "state")
@pulumi.output_type
class SyncMigrationDatabaseErrorEventResponse(dict):
"""
Database migration errors for online migration
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "eventText":
suggest = "event_text"
elif key == "eventTypeString":
suggest = "event_type_string"
elif key == "timestampString":
suggest = "timestamp_string"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SyncMigrationDatabaseErrorEventResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SyncMigrationDatabaseErrorEventResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SyncMigrationDatabaseErrorEventResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
event_text: str,
event_type_string: str,
timestamp_string: str):
"""
Database migration errors for online migration
:param str event_text: Event text.
:param str event_type_string: Event type.
:param str timestamp_string: String value of timestamp.
"""
pulumi.set(__self__, "event_text", event_text)
pulumi.set(__self__, "event_type_string", event_type_string)
pulumi.set(__self__, "timestamp_string", timestamp_string)
@property
@pulumi.getter(name="eventText")
def event_text(self) -> str:
"""
Event text.
"""
return pulumi.get(self, "event_text")
@property
@pulumi.getter(name="eventTypeString")
def event_type_string(self) -> str:
"""
Event type.
"""
return pulumi.get(self, "event_type_string")
@property
@pulumi.getter(name="timestampString")
def timestamp_string(self) -> str:
"""
String value of timestamp.
"""
return pulumi.get(self, "timestamp_string")
@pulumi.output_type
class ValidateMigrationInputSqlServerSqlDbSyncTaskPropertiesResponse(dict):
"""
Properties for task that validates migration input for SQL to Azure SQL DB sync migrations
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ValidateMigrationInputSqlServerSqlDbSyncTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ValidateMigrationInputSqlServerSqlDbSyncTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ValidateMigrationInputSqlServerSqlDbSyncTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence['outputs.ValidateSyncMigrationInputSqlServerTaskOutputResponse'],
state: str,
task_type: str,
input: Optional['outputs.ValidateSyncMigrationInputSqlServerTaskInputResponse'] = None):
"""
Properties for task that validates migration input for SQL to Azure SQL DB sync migrations
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence['ValidateSyncMigrationInputSqlServerTaskOutputResponse'] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'ValidateMigrationInput.SqlServer.SqlDb.Sync'.
:param 'ValidateSyncMigrationInputSqlServerTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'ValidateMigrationInput.SqlServer.SqlDb.Sync')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence['outputs.ValidateSyncMigrationInputSqlServerTaskOutputResponse']:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'ValidateMigrationInput.SqlServer.SqlDb.Sync'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.ValidateSyncMigrationInputSqlServerTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class ValidateMigrationInputSqlServerSqlMISyncTaskInputResponse(dict):
"""
Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online scenario.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "azureApp":
suggest = "azure_app"
elif key == "selectedDatabases":
suggest = "selected_databases"
elif key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "storageResourceId":
suggest = "storage_resource_id"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
elif key == "backupFileShare":
suggest = "backup_file_share"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ValidateMigrationInputSqlServerSqlMISyncTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ValidateMigrationInputSqlServerSqlMISyncTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ValidateMigrationInputSqlServerSqlMISyncTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
azure_app: 'outputs.AzureActiveDirectoryAppResponse',
selected_databases: Sequence['outputs.MigrateSqlServerSqlMIDatabaseInputResponse'],
source_connection_info: 'outputs.SqlConnectionInfoResponse',
storage_resource_id: str,
target_connection_info: 'outputs.MiSqlConnectionInfoResponse',
backup_file_share: Optional['outputs.FileShareResponse'] = None):
"""
Input for task that migrates SQL Server databases to Azure SQL Database Managed Instance online scenario.
:param 'AzureActiveDirectoryAppResponse' azure_app: Azure Active Directory Application the DMS instance will use to connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage Account
:param Sequence['MigrateSqlServerSqlMIDatabaseInputResponse'] selected_databases: Databases to migrate
:param 'SqlConnectionInfoResponse' source_connection_info: Connection information for source SQL Server
:param str storage_resource_id: Fully qualified resourceId of storage
:param 'MiSqlConnectionInfoResponse' target_connection_info: Connection information for Azure SQL Database Managed Instance
:param 'FileShareResponse' backup_file_share: Backup file share information for all selected databases.
"""
pulumi.set(__self__, "azure_app", azure_app)
pulumi.set(__self__, "selected_databases", selected_databases)
pulumi.set(__self__, "source_connection_info", source_connection_info)
pulumi.set(__self__, "storage_resource_id", storage_resource_id)
pulumi.set(__self__, "target_connection_info", target_connection_info)
if backup_file_share is not None:
pulumi.set(__self__, "backup_file_share", backup_file_share)
@property
@pulumi.getter(name="azureApp")
def azure_app(self) -> 'outputs.AzureActiveDirectoryAppResponse':
"""
Azure Active Directory Application the DMS instance will use to connect to the target instance of Azure SQL Database Managed Instance and the Azure Storage Account
"""
return pulumi.get(self, "azure_app")
@property
@pulumi.getter(name="selectedDatabases")
def selected_databases(self) -> Sequence['outputs.MigrateSqlServerSqlMIDatabaseInputResponse']:
"""
Databases to migrate
"""
return pulumi.get(self, "selected_databases")
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Connection information for source SQL Server
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="storageResourceId")
def storage_resource_id(self) -> str:
"""
Fully qualified resourceId of storage
"""
return pulumi.get(self, "storage_resource_id")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.MiSqlConnectionInfoResponse':
"""
Connection information for Azure SQL Database Managed Instance
"""
return pulumi.get(self, "target_connection_info")
@property
@pulumi.getter(name="backupFileShare")
def backup_file_share(self) -> Optional['outputs.FileShareResponse']:
"""
Backup file share information for all selected databases.
"""
return pulumi.get(self, "backup_file_share")
@pulumi.output_type
class ValidateMigrationInputSqlServerSqlMISyncTaskOutputResponse(dict):
"""
Output for task that validates migration input for Azure SQL Database Managed Instance online migration
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "validationErrors":
suggest = "validation_errors"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ValidateMigrationInputSqlServerSqlMISyncTaskOutputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ValidateMigrationInputSqlServerSqlMISyncTaskOutputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ValidateMigrationInputSqlServerSqlMISyncTaskOutputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
id: str,
name: str,
validation_errors: Sequence['outputs.ReportableExceptionResponse']):
"""
Output for task that validates migration input for Azure SQL Database Managed Instance online migration
:param str id: Database identifier
:param str name: Name of database
:param Sequence['ReportableExceptionResponse'] validation_errors: Errors associated with a selected database object
"""
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "validation_errors", validation_errors)
@property
@pulumi.getter
def id(self) -> str:
"""
Database identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of database
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Errors associated with a selected database object
"""
return pulumi.get(self, "validation_errors")
@pulumi.output_type
class ValidateMigrationInputSqlServerSqlMISyncTaskPropertiesResponse(dict):
"""
Properties for task that validates migration input for SQL to Azure SQL Database Managed Instance sync scenario
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ValidateMigrationInputSqlServerSqlMISyncTaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ValidateMigrationInputSqlServerSqlMISyncTaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ValidateMigrationInputSqlServerSqlMISyncTaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence['outputs.ValidateMigrationInputSqlServerSqlMISyncTaskOutputResponse'],
state: str,
task_type: str,
input: Optional['outputs.ValidateMigrationInputSqlServerSqlMISyncTaskInputResponse'] = None):
"""
Properties for task that validates migration input for SQL to Azure SQL Database Managed Instance sync scenario
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence['ValidateMigrationInputSqlServerSqlMISyncTaskOutputResponse'] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'ValidateMigrationInput.SqlServer.AzureSqlDbMI.Sync.LRS'.
:param 'ValidateMigrationInputSqlServerSqlMISyncTaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'ValidateMigrationInput.SqlServer.AzureSqlDbMI.Sync.LRS')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence['outputs.ValidateMigrationInputSqlServerSqlMISyncTaskOutputResponse']:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'ValidateMigrationInput.SqlServer.AzureSqlDbMI.Sync.LRS'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.ValidateMigrationInputSqlServerSqlMISyncTaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class ValidateMigrationInputSqlServerSqlMITaskInputResponse(dict):
"""
Input for task that validates migration input for SQL to Azure SQL Managed Instance
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "backupBlobShare":
suggest = "backup_blob_share"
elif key == "selectedDatabases":
suggest = "selected_databases"
elif key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
elif key == "backupFileShare":
suggest = "backup_file_share"
elif key == "backupMode":
suggest = "backup_mode"
elif key == "selectedLogins":
suggest = "selected_logins"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ValidateMigrationInputSqlServerSqlMITaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ValidateMigrationInputSqlServerSqlMITaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ValidateMigrationInputSqlServerSqlMITaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
backup_blob_share: 'outputs.BlobShareResponse',
selected_databases: Sequence['outputs.MigrateSqlServerSqlMIDatabaseInputResponse'],
source_connection_info: 'outputs.SqlConnectionInfoResponse',
target_connection_info: 'outputs.SqlConnectionInfoResponse',
backup_file_share: Optional['outputs.FileShareResponse'] = None,
backup_mode: Optional[str] = None,
selected_logins: Optional[Sequence[str]] = None):
"""
Input for task that validates migration input for SQL to Azure SQL Managed Instance
:param 'BlobShareResponse' backup_blob_share: SAS URI of Azure Storage Account Container to be used for storing backup files.
:param Sequence['MigrateSqlServerSqlMIDatabaseInputResponse'] selected_databases: Databases to migrate
:param 'SqlConnectionInfoResponse' source_connection_info: Information for connecting to source
:param 'SqlConnectionInfoResponse' target_connection_info: Information for connecting to target
:param 'FileShareResponse' backup_file_share: Backup file share information for all selected databases.
:param str backup_mode: Backup Mode to specify whether to use existing backup or create new backup.
:param Sequence[str] selected_logins: Logins to migrate
"""
pulumi.set(__self__, "backup_blob_share", backup_blob_share)
pulumi.set(__self__, "selected_databases", selected_databases)
pulumi.set(__self__, "source_connection_info", source_connection_info)
pulumi.set(__self__, "target_connection_info", target_connection_info)
if backup_file_share is not None:
pulumi.set(__self__, "backup_file_share", backup_file_share)
if backup_mode is not None:
pulumi.set(__self__, "backup_mode", backup_mode)
if selected_logins is not None:
pulumi.set(__self__, "selected_logins", selected_logins)
@property
@pulumi.getter(name="backupBlobShare")
def backup_blob_share(self) -> 'outputs.BlobShareResponse':
"""
SAS URI of Azure Storage Account Container to be used for storing backup files.
"""
return pulumi.get(self, "backup_blob_share")
@property
@pulumi.getter(name="selectedDatabases")
def selected_databases(self) -> Sequence['outputs.MigrateSqlServerSqlMIDatabaseInputResponse']:
"""
Databases to migrate
"""
return pulumi.get(self, "selected_databases")
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Information for connecting to source
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Information for connecting to target
"""
return pulumi.get(self, "target_connection_info")
@property
@pulumi.getter(name="backupFileShare")
def backup_file_share(self) -> Optional['outputs.FileShareResponse']:
"""
Backup file share information for all selected databases.
"""
return pulumi.get(self, "backup_file_share")
@property
@pulumi.getter(name="backupMode")
def backup_mode(self) -> Optional[str]:
"""
Backup Mode to specify whether to use existing backup or create new backup.
"""
return pulumi.get(self, "backup_mode")
@property
@pulumi.getter(name="selectedLogins")
def selected_logins(self) -> Optional[Sequence[str]]:
"""
Logins to migrate
"""
return pulumi.get(self, "selected_logins")
@pulumi.output_type
class ValidateMigrationInputSqlServerSqlMITaskOutputResponse(dict):
"""
Output for task that validates migration input for SQL to Azure SQL Managed Instance migrations
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "backupFolderErrors":
suggest = "backup_folder_errors"
elif key == "backupShareCredentialsErrors":
suggest = "backup_share_credentials_errors"
elif key == "backupStorageAccountErrors":
suggest = "backup_storage_account_errors"
elif key == "existingBackupErrors":
suggest = "existing_backup_errors"
elif key == "restoreDatabaseNameErrors":
suggest = "restore_database_name_errors"
elif key == "databaseBackupInfo":
suggest = "database_backup_info"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ValidateMigrationInputSqlServerSqlMITaskOutputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ValidateMigrationInputSqlServerSqlMITaskOutputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ValidateMigrationInputSqlServerSqlMITaskOutputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
backup_folder_errors: Sequence['outputs.ReportableExceptionResponse'],
backup_share_credentials_errors: Sequence['outputs.ReportableExceptionResponse'],
backup_storage_account_errors: Sequence['outputs.ReportableExceptionResponse'],
existing_backup_errors: Sequence['outputs.ReportableExceptionResponse'],
id: str,
name: str,
restore_database_name_errors: Sequence['outputs.ReportableExceptionResponse'],
database_backup_info: Optional['outputs.DatabaseBackupInfoResponse'] = None):
"""
Output for task that validates migration input for SQL to Azure SQL Managed Instance migrations
:param Sequence['ReportableExceptionResponse'] backup_folder_errors: Errors associated with the BackupFolder path
:param Sequence['ReportableExceptionResponse'] backup_share_credentials_errors: Errors associated with backup share user name and password credentials
:param Sequence['ReportableExceptionResponse'] backup_storage_account_errors: Errors associated with the storage account provided.
:param Sequence['ReportableExceptionResponse'] existing_backup_errors: Errors associated with existing backup files.
:param str id: Result identifier
:param str name: Name of database
:param Sequence['ReportableExceptionResponse'] restore_database_name_errors: Errors associated with the RestoreDatabaseName
:param 'DatabaseBackupInfoResponse' database_backup_info: Information about backup files when existing backup mode is used.
"""
pulumi.set(__self__, "backup_folder_errors", backup_folder_errors)
pulumi.set(__self__, "backup_share_credentials_errors", backup_share_credentials_errors)
pulumi.set(__self__, "backup_storage_account_errors", backup_storage_account_errors)
pulumi.set(__self__, "existing_backup_errors", existing_backup_errors)
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "restore_database_name_errors", restore_database_name_errors)
if database_backup_info is not None:
pulumi.set(__self__, "database_backup_info", database_backup_info)
@property
@pulumi.getter(name="backupFolderErrors")
def backup_folder_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Errors associated with the BackupFolder path
"""
return pulumi.get(self, "backup_folder_errors")
@property
@pulumi.getter(name="backupShareCredentialsErrors")
def backup_share_credentials_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Errors associated with backup share user name and password credentials
"""
return pulumi.get(self, "backup_share_credentials_errors")
@property
@pulumi.getter(name="backupStorageAccountErrors")
def backup_storage_account_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Errors associated with the storage account provided.
"""
return pulumi.get(self, "backup_storage_account_errors")
@property
@pulumi.getter(name="existingBackupErrors")
def existing_backup_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Errors associated with existing backup files.
"""
return pulumi.get(self, "existing_backup_errors")
@property
@pulumi.getter
def id(self) -> str:
"""
Result identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of database
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="restoreDatabaseNameErrors")
def restore_database_name_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Errors associated with the RestoreDatabaseName
"""
return pulumi.get(self, "restore_database_name_errors")
@property
@pulumi.getter(name="databaseBackupInfo")
def database_backup_info(self) -> Optional['outputs.DatabaseBackupInfoResponse']:
"""
Information about backup files when existing backup mode is used.
"""
return pulumi.get(self, "database_backup_info")
@pulumi.output_type
class ValidateMigrationInputSqlServerSqlMITaskPropertiesResponse(dict):
"""
Properties for task that validates migration input for SQL to Azure SQL Database Managed Instance
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "taskType":
suggest = "task_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ValidateMigrationInputSqlServerSqlMITaskPropertiesResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ValidateMigrationInputSqlServerSqlMITaskPropertiesResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ValidateMigrationInputSqlServerSqlMITaskPropertiesResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
commands: Sequence[Any],
errors: Sequence['outputs.ODataErrorResponse'],
output: Sequence['outputs.ValidateMigrationInputSqlServerSqlMITaskOutputResponse'],
state: str,
task_type: str,
input: Optional['outputs.ValidateMigrationInputSqlServerSqlMITaskInputResponse'] = None):
"""
Properties for task that validates migration input for SQL to Azure SQL Database Managed Instance
:param Sequence[Union['MigrateMISyncCompleteCommandPropertiesResponse', 'MigrateSyncCompleteCommandPropertiesResponse']] commands: Array of command properties.
:param Sequence['ODataErrorResponse'] errors: Array of errors. This is ignored if submitted.
:param Sequence['ValidateMigrationInputSqlServerSqlMITaskOutputResponse'] output: Task output. This is ignored if submitted.
:param str state: The state of the task. This is ignored if submitted.
:param str task_type: Task type.
Expected value is 'ValidateMigrationInput.SqlServer.AzureSqlDbMI'.
:param 'ValidateMigrationInputSqlServerSqlMITaskInputResponse' input: Task input
"""
pulumi.set(__self__, "commands", commands)
pulumi.set(__self__, "errors", errors)
pulumi.set(__self__, "output", output)
pulumi.set(__self__, "state", state)
pulumi.set(__self__, "task_type", 'ValidateMigrationInput.SqlServer.AzureSqlDbMI')
if input is not None:
pulumi.set(__self__, "input", input)
@property
@pulumi.getter
def commands(self) -> Sequence[Any]:
"""
Array of command properties.
"""
return pulumi.get(self, "commands")
@property
@pulumi.getter
def errors(self) -> Sequence['outputs.ODataErrorResponse']:
"""
Array of errors. This is ignored if submitted.
"""
return pulumi.get(self, "errors")
@property
@pulumi.getter
def output(self) -> Sequence['outputs.ValidateMigrationInputSqlServerSqlMITaskOutputResponse']:
"""
Task output. This is ignored if submitted.
"""
return pulumi.get(self, "output")
@property
@pulumi.getter
def state(self) -> str:
"""
The state of the task. This is ignored if submitted.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="taskType")
def task_type(self) -> str:
"""
Task type.
Expected value is 'ValidateMigrationInput.SqlServer.AzureSqlDbMI'.
"""
return pulumi.get(self, "task_type")
@property
@pulumi.getter
def input(self) -> Optional['outputs.ValidateMigrationInputSqlServerSqlMITaskInputResponse']:
"""
Task input
"""
return pulumi.get(self, "input")
@pulumi.output_type
class ValidateSyncMigrationInputSqlServerTaskInputResponse(dict):
"""
Input for task that validates migration input for SQL sync migrations
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "selectedDatabases":
suggest = "selected_databases"
elif key == "sourceConnectionInfo":
suggest = "source_connection_info"
elif key == "targetConnectionInfo":
suggest = "target_connection_info"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ValidateSyncMigrationInputSqlServerTaskInputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ValidateSyncMigrationInputSqlServerTaskInputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ValidateSyncMigrationInputSqlServerTaskInputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
selected_databases: Sequence['outputs.MigrateSqlServerSqlDbSyncDatabaseInputResponse'],
source_connection_info: 'outputs.SqlConnectionInfoResponse',
target_connection_info: 'outputs.SqlConnectionInfoResponse'):
"""
Input for task that validates migration input for SQL sync migrations
:param Sequence['MigrateSqlServerSqlDbSyncDatabaseInputResponse'] selected_databases: Databases to migrate
:param 'SqlConnectionInfoResponse' source_connection_info: Information for connecting to source SQL server
:param 'SqlConnectionInfoResponse' target_connection_info: Information for connecting to target
"""
pulumi.set(__self__, "selected_databases", selected_databases)
pulumi.set(__self__, "source_connection_info", source_connection_info)
pulumi.set(__self__, "target_connection_info", target_connection_info)
@property
@pulumi.getter(name="selectedDatabases")
def selected_databases(self) -> Sequence['outputs.MigrateSqlServerSqlDbSyncDatabaseInputResponse']:
"""
Databases to migrate
"""
return pulumi.get(self, "selected_databases")
@property
@pulumi.getter(name="sourceConnectionInfo")
def source_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Information for connecting to source SQL server
"""
return pulumi.get(self, "source_connection_info")
@property
@pulumi.getter(name="targetConnectionInfo")
def target_connection_info(self) -> 'outputs.SqlConnectionInfoResponse':
"""
Information for connecting to target
"""
return pulumi.get(self, "target_connection_info")
@pulumi.output_type
class ValidateSyncMigrationInputSqlServerTaskOutputResponse(dict):
"""
Output for task that validates migration input for SQL sync migrations
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "validationErrors":
suggest = "validation_errors"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ValidateSyncMigrationInputSqlServerTaskOutputResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ValidateSyncMigrationInputSqlServerTaskOutputResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ValidateSyncMigrationInputSqlServerTaskOutputResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
id: str,
name: str,
validation_errors: Sequence['outputs.ReportableExceptionResponse']):
"""
Output for task that validates migration input for SQL sync migrations
:param str id: Database identifier
:param str name: Name of database
:param Sequence['ReportableExceptionResponse'] validation_errors: Errors associated with a selected database object
"""
pulumi.set(__self__, "id", id)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "validation_errors", validation_errors)
@property
@pulumi.getter
def id(self) -> str:
"""
Database identifier
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
Name of database
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="validationErrors")
def validation_errors(self) -> Sequence['outputs.ReportableExceptionResponse']:
"""
Errors associated with a selected database object
"""
return pulumi.get(self, "validation_errors")
@pulumi.output_type
class ValidationErrorResponse(dict):
"""
    Description of the errors that occur while performing migration validation
"""
def __init__(__self__, *,
severity: str,
text: str):
"""
        Description of the errors that occur while performing migration validation
:param str severity: Severity of the error
:param str text: Error Text
"""
pulumi.set(__self__, "severity", severity)
pulumi.set(__self__, "text", text)
@property
@pulumi.getter
def severity(self) -> str:
"""
Severity of the error
"""
return pulumi.get(self, "severity")
@property
@pulumi.getter
def text(self) -> str:
"""
Error Text
"""
return pulumi.get(self, "text")
@pulumi.output_type
class WaitStatisticsResponse(dict):
"""
Wait statistics gathered during query batch execution
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "waitCount":
suggest = "wait_count"
elif key == "waitTimeMs":
suggest = "wait_time_ms"
elif key == "waitType":
suggest = "wait_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in WaitStatisticsResponse. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
WaitStatisticsResponse.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
WaitStatisticsResponse.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
wait_count: float,
wait_time_ms: float,
wait_type: str):
"""
Wait statistics gathered during query batch execution
:param float wait_count: Total no. of waits
:param float wait_time_ms: Total wait time in millisecond(s)
:param str wait_type: Type of the Wait
"""
pulumi.set(__self__, "wait_count", wait_count)
if wait_time_ms is None:
wait_time_ms = 0
pulumi.set(__self__, "wait_time_ms", wait_time_ms)
pulumi.set(__self__, "wait_type", wait_type)
@property
@pulumi.getter(name="waitCount")
def wait_count(self) -> float:
"""
Total no. of waits
"""
return pulumi.get(self, "wait_count")
@property
@pulumi.getter(name="waitTimeMs")
def wait_time_ms(self) -> float:
"""
Total wait time in millisecond(s)
"""
return pulumi.get(self, "wait_time_ms")
@property
@pulumi.getter(name="waitType")
def wait_type(self) -> str:
"""
Type of the Wait
"""
return pulumi.get(self, "wait_type")
| 37.041463 | 451 | 0.645302 |
b5fe941f7d6a7efd36926ec91d5fa15064b1d9d4 | 2,031 | py | Python | pages/locators.py | MaxFix/stepik_autotest_course_final_project | 28e92c05b35c1f03afb3309f24818c0d21952b95 | ["MIT"] | null | null | null | pages/locators.py | MaxFix/stepik_autotest_course_final_project | 28e92c05b35c1f03afb3309f24818c0d21952b95 | ["MIT"] | null | null | null | pages/locators.py | MaxFix/stepik_autotest_course_final_project | 28e92c05b35c1f03afb3309f24818c0d21952b95 | ["MIT"] | null | null | null |
from selenium.webdriver.common.by import By
class BasePageLocators():
LOGIN_LINK = (By.CSS_SELECTOR, "#login_link")
LOGIN_LINK_INVALID = (By.CSS_SELECTOR, "#login_link_inc")
USER_ICON = (By.CSS_SELECTOR, ".icon-user")
class MainPageLocators():
LOGIN_LINK = (By.CSS_SELECTOR, "#login_link")
TOP_BASKET_LINK = (By.XPATH, '//div[contains(@class,"basket-mini")]/span[contains(@class,"btn-group")]/a')
class LoginPageLocators():
LOGIN_FORM = (By.ID, "login_form")
REGISTER_FORM = (By.ID, "register_form")
REGISTER_EMAIL = (By.CSS_SELECTOR, "#id_registration-email")
REGISTER_PASSWORD1 = (By.CSS_SELECTOR, "#id_registration-password1")
REGISTER_PASSWORD_TWO = (By.CSS_SELECTOR, "#id_registration-password2")
REGISTER_SUBMIT = (By.XPATH, '//button[@name="registration_submit" and @value="Register"]')
class ProductPageLocators():
ADD_TO_BASKET_BTN = (By.CLASS_NAME, "btn-add-to-basket")
PRODUCT_TITLE = (By.XPATH, '//div[contains(@class,"product_main")]/h1')
PRODUCT_PRICE = (By.XPATH, '//div[contains(@class,"product_main")]/p[@class="price_color"]')
PRODUCT_INSTOCK = (By.XPATH, '//div[contains(@class,"product_main")]/p/i[@class="icon-ok"]')
PRODUCT_SUCCESSFULLY_ADDED_TO_BASKET = (
By.XPATH, '//div[contains(@class,"alert-success")]/div[contains(@class,"alertinner")]/strong')
BASKET_TOTAL_MESSAGE = (
By.XPATH, '//div[contains(@class,"alert-info")]/div[contains(@class,"alertinner")]/p/strong')
TOP_BASKET_LINK = (By.XPATH, '//a[@class="btn btn-default"]')
class BasketPageLocators(object):
BASKET_TILE_EN = (By.XPATH, '//head/title[contains(text(),"Basket")]')
    PAGE_ACTION_TITLE_EN = (By.XPATH, '//div[contains(@class,"page-header") and contains(@class,"action")]'
                                      '/h1[contains(text(),"Basket")]')
BASKET_ITEMS = (By.XPATH, '//div[@class="basket-items"]/div[@class="row"]')
BASKET_IS_EMPTY_MSG = (By.XPATH, '//div[@id="content_inner"]/p[contains(text(),"Your basket is empty.")]')
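# Usage sketch (not part of the locators module; the URL below is an
# assumption). Each locator is a (By, selector) tuple, so it unpacks
# directly into Selenium's find_element:
#
#   from selenium import webdriver
#   driver = webdriver.Chrome()
#   driver.get("http://selenium1py.pythonanywhere.com/")
#   driver.find_element(*BasePageLocators.LOGIN_LINK).click()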
| 48.357143 | 110 | 0.676022 |
a82ed781ffeabe4c1567163e6f58a3b0ca6198ad | 865 | py | Python | website/website/views.py | bubai666sen/Visitor-Manager | 9ecfa087f332d368e2a52dd812c272d30c3a55df | ["MIT"] | null | null | null | website/website/views.py | bubai666sen/Visitor-Manager | 9ecfa087f332d368e2a52dd812c272d30c3a55df | ["MIT"] | null | null | null | website/website/views.py | bubai666sen/Visitor-Manager | 9ecfa087f332d368e2a52dd812c272d30c3a55df | ["MIT"] | 1 | 2020-09-14T14:37:02.000Z | 2020-09-14T14:37:02.000Z |
from django.shortcuts import render
from CMS.models import Page  # Page is the only model these views use
def index(request):
    background_image = None
    try:
        page = Page.objects.get(page=1, status=1)
        try:
            background_image = page.background_image.name.split("/")[-1]
        except AttributeError:
            # Page has no usable background image; keep the None default.
            pass
    except (Page.DoesNotExist, Page.MultipleObjectsReturned):
        page = None
    data = {
        'page': page,
        'background_image': background_image,
    }
    return render(request, 'index.html', data)
def about(request):
    background_image = None
    try:
        page = Page.objects.get(page=2, status=1)
        try:
            background_image = page.background_image.name.split("/")[-1]
        except AttributeError:
            # Page has no usable background image; keep the None default.
            pass
    except (Page.DoesNotExist, Page.MultipleObjectsReturned):
        page = None
    data = {
        'active': 'about',
        'page': page,
        'background_image': background_image,
    }
    return render(request, 'about.html', data)
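# Wiring sketch (hypothetical urls.py, not part of this file): the two views
# above only need plain path routes.
#
#   from django.urls import path
#   from website import views
#
#   urlpatterns = [
#       path('', views.index, name='index'),
#       path('about/', views.about, name='about'),
#   ]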
| 25.441176 | 72 | 0.572254 |
57599217230386b049b3e9e325ff6e8e4f414071 | 396 | py | Python | fourth-year/EGC/EGC-1230-julgomrod/decide/base/management/commands/register_uvus.py | JulianGR/university | 2f643825b238892d602baf0c8e71e4c1b0fdefc2 | ["MIT"] | null | null | null | fourth-year/EGC/EGC-1230-julgomrod/decide/base/management/commands/register_uvus.py | JulianGR/university | 2f643825b238892d602baf0c8e71e4c1b0fdefc2 | ["MIT"] | null | null | null | fourth-year/EGC/EGC-1230-julgomrod/decide/base/management/commands/register_uvus.py | JulianGR/university | 2f643825b238892d602baf0c8e71e4c1b0fdefc2 | ["MIT"] | null | null | null |
from django.core.management.base import BaseCommand, CommandError
from base.mods import register_status
class Command(BaseCommand):  # Django only discovers a class literally named "Command"
    help = 'Returns the ID for a user'
#def add_arguments(self, parser):
# parser.add_argument('uvus', nargs='+', type=str)
def handle(self, *args, **options):
#uvus=str(options['uvus'][0])
register_status('REGISTERED')
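# Invocation sketch: with the class named Command (see note above), Django
# exposes the module's filename as the command name, so this runs as
#
#   python manage.py register_uvus
#
# which simply calls base.mods.register_status('REGISTERED').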
| 26.4 | 65 | 0.689394 |
0e9efe247f8986b465a4b2f305f1592bf4f46906 | 2,639 | py | Python | python/sdk/client/models/alert_condition_metric_type.py | ashwinath/merlin | 087a7fa6fb21e4c771d64418bd58873175226ca1 | ["Apache-2.0"] | null | null | null | python/sdk/client/models/alert_condition_metric_type.py | ashwinath/merlin | 087a7fa6fb21e4c771d64418bd58873175226ca1 | ["Apache-2.0"] | null | null | null | python/sdk/client/models/alert_condition_metric_type.py | ashwinath/merlin | 087a7fa6fb21e4c771d64418bd58873175226ca1 | ["Apache-2.0"] | null | null | null |
# coding: utf-8
"""
Merlin
API Guide for accessing Merlin's model management, deployment, and serving functionalities # noqa: E501
OpenAPI spec version: 0.7.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class AlertConditionMetricType(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
allowed enum values
"""
THROUGHPUT = "throughput"
LATENCY = "latency"
ERROR_RATE = "error_rate"
CPU = "cpu"
MEMORY = "memory"
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
}
attribute_map = {
}
def __init__(self): # noqa: E501
"""AlertConditionMetricType - a model defined in Swagger""" # noqa: E501
self.discriminator = None
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(AlertConditionMetricType, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AlertConditionMetricType):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 27.206186
| 108
| 0.558924
|
136338c2c010f26f4a1d4ad9d3f0eca662bb6e07
| 10,120
|
py
|
Python
|
multimedia/south_migrations/0002_auto__add_field_media_thumbnail_image__add_field_media_auto_thumbnail_.py
|
jbittel/django-multimedia
|
4ddd5e6d9f4f680e2f4f68cc3616ced8f0fc2a43
|
[
"BSD-3-Clause"
] | 19
|
2015-01-28T08:40:20.000Z
|
2021-12-18T11:55:58.000Z
|
multimedia/migrations/0002_auto__add_field_media_thumbnail_image__add_field_media_auto_thumbnail_.py
|
kamilion/django-multimedia
|
702a61e03ec9639743684a3979525529a875b092
|
[
"BSD-3-Clause"
] | 2
|
2015-02-09T17:03:24.000Z
|
2015-04-22T17:57:45.000Z
|
multimedia/migrations/0002_auto__add_field_media_thumbnail_image__add_field_media_auto_thumbnail_.py
|
kamilion/django-multimedia
|
702a61e03ec9639743684a3979525529a875b092
|
[
"BSD-3-Clause"
] | 4
|
2015-02-02T14:05:08.000Z
|
2016-09-14T00:44:55.000Z
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Media.thumbnail_image'
db.add_column('multimedia_media', 'thumbnail_image', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['filer.Image'], null=True, blank=True), keep_default=False)
# Adding field 'Media.auto_thumbnail'
db.add_column('multimedia_media', 'auto_thumbnail', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
# Adding field 'Media.thumbnail_offset'
db.add_column('multimedia_media', 'thumbnail_offset', self.gf('django.db.models.fields.PositiveIntegerField')(default=4, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'Media.thumbnail_image'
db.delete_column('multimedia_media', 'thumbnail_image_id')
# Deleting field 'Media.auto_thumbnail'
db.delete_column('multimedia_media', 'auto_thumbnail')
# Deleting field 'Media.thumbnail_offset'
db.delete_column('multimedia_media', 'thumbnail_offset')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'filer.file': {
'Meta': {'object_name': 'File'},
'_file_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_file_type_plugin_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'folder': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'all_files'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'has_all_mandatory_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'original_filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'owned_files'", 'null': 'True', 'to': "orm['auth.User']"}),
'sha1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '40', 'blank': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.folder': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('parent', 'name'),)", 'object_name': 'Folder'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filer_owned_folders'", 'null': 'True', 'to': "orm['auth.User']"}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['filer.Folder']"}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'uploaded_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'filer.image': {
'Meta': {'object_name': 'Image', '_ormbases': ['filer.File']},
'_height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'_width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'author': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'date_taken': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'default_alt_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_caption': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'file_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['filer.File']", 'unique': 'True', 'primary_key': 'True'}),
'must_always_publish_author_credit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'must_always_publish_copyright': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'subject_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'multimedia.media': {
'Meta': {'ordering': "('-date_added',)", 'object_name': 'Media'},
'auto_thumbnail': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'date_added': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'media_type': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'thumbnail_image': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['filer.Image']", 'null': 'True', 'blank': 'True'}),
'thumbnail_offset': ('django.db.models.fields.PositiveIntegerField', [], {'default': '4', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'uploaded': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'})
}
}
complete_apps = ['multimedia']
| 77.251908
| 182
| 0.577569
|
9569d90c2a91ff07ecbcd69037f3bed5e7f60341
| 8,396
|
py
|
Python
|
irodsapi/irodsApisForWp8/projects.py
|
lexis-project/ddi-service-apis
|
9e96c4159154d70613b1977a8ea28374c038b463
|
[
"Apache-2.0"
] | null | null | null |
irodsapi/irodsApisForWp8/projects.py
|
lexis-project/ddi-service-apis
|
9e96c4159154d70613b1977a8ea28374c038b463
|
[
"Apache-2.0"
] | null | null | null |
irodsapi/irodsApisForWp8/projects.py
|
lexis-project/ddi-service-apis
|
9e96c4159154d70613b1977a8ea28374c038b463
|
[
"Apache-2.0"
] | null | null | null |
import irods.exception
from irods.session import iRODSSession
from irods.access import iRODSAccess
from irods.exception import (CAT_INVALID_USER)
import hashlib
from . import group
from . import utils
import logging
logger = logging.getLogger(__name__)
def create_project(session, zone, project_name):
"""Create a project on the DDI. This includes creating the necessary directories, groups, and setting the rights
Parameters
----------
session : iRODS session object
        an object that sets up a session with iRODS
    zone: str
        iRODS zone to create the project in
    project_name: str
        The name of the project.
    """
    proj_hash = utils.hash(project_name)
coll_user = "/%s/user/%s" % (zone, proj_hash)
coll_project = "/%s/project/%s" % (zone, proj_hash)
coll_public = "/%s/public/%s" % (zone, proj_hash)
session.collections.create(coll_user)
session.collections.get(coll_user).metadata.add('ShortProject', project_name)
session.collections.create(coll_project)
session.collections.get(coll_project).metadata.add('ShortProject', project_name)
session.collections.create(coll_public)
session.collections.get(coll_public).metadata.add('ShortProject', project_name)
acl_project = iRODSAccess('inherit', coll_project)
acl_public = iRODSAccess('inherit', coll_public)
session.permissions.set(acl_project)
session.permissions.set(acl_public)
acl_project_rodsadmin = iRODSAccess('own', coll_project, "rodsadmin")
session.permissions.set(acl_project_rodsadmin)
acl_public_rodsadmin = iRODSAccess('own', coll_public, "rodsadmin")
session.permissions.set(acl_public_rodsadmin)
group.create_project_group(session, project_name)
group.set_project_group_rights(session, zone, project_name)
group.create_project_admin_group(session, project_name)
group.set_project_group_adm_rights(session, zone, project_name)
def remove_project(session, zone, project_name):
"""Delete a project on the DDI. This includes deleting the necessary directories, groups, and revoking the rights
Parameters
----------
session : iRODS session object
        an object that sets up a session with iRODS
    zone: str
        iRODS zone to remove the project from
project_name: str
The name of the project.
"""
proj_hash = "proj" + hashlib.md5(project_name.encode()).hexdigest()
coll_user = "/%s/user/%s" % (zone, proj_hash)
coll_project = "/%s/project/%s" % (zone, proj_hash)
session.collections.remove(coll_user, recurse=True, force=True)
session.collections.remove(coll_project, recurse=True, force=True)
project_group = session.user_groups.get(project_name)
project_admin_group = session.user_groups.get(project_name+"_mgr")
project_group.remove()
project_admin_group.remove()
def create_new_user_directories(session, zone, project_name, user, federated_zones):
"""Create the necessary directories for a new user on the DDI. This includes setting the rights for the newly created directories
Parameters
----------
session : iRODS session object
        an object that sets up a session with iRODS
    zone: str
        iRODS zone to create the user directories in
    project_name: str
        The name of the project.
    user: str
        The username the directories are created for.
    federated_zones: list
        Federated zones whose matching user also receives access.
    """
    proj_hash = utils.hash(project_name)
coll_user = "/%s/user/%s/%s" % (zone, proj_hash, user)
session.collections.create(coll_user)
acl_inherit = iRODSAccess('inherit', coll_user)
session.permissions.set(acl_inherit)
acl_user = iRODSAccess("own", coll_user, user, zone)
session.permissions.set(acl_user)
for fed_zone in federated_zones:
username = user + "#" + fed_zone
acl_user_fed = iRODSAccess("own", coll_user, user, fed_zone)
try:
session.permissions.set(acl_user_fed)
except irods.exception.iRODSException as e:
logger.error("Error giving access to user directories for federated user {0}: {1}".format(username, e))
def remove_user_directories(session, zone, project_name, user):
"""Remove the necessary directories for a user on the DDI. This includes revoking the rights for his/her user directories
Parameters
----------
session : iRODS session object
        an object that sets up a session with iRODS
    zone: str
        iRODS zone the user directories live in
    project_name: str
        The name of the project.
    user: str
        The username whose directories are removed.
    """
    proj_hash = utils.hash(project_name)
coll_user = "/%s/user/%s/%s" % (zone, proj_hash, user)
session.collections.remove(coll_user, recurse=True, force=True)
def update_new_admin_user_directories(session, zone, project_name, user, federated_zones):
"""Create the necessary directories for a new admin user on the DDI. This includes setting the rights for the newly created directories
Parameters
----------
session : iRODS session object
        an object that sets up a session with iRODS
    zone: str
        iRODS zone the directories live in
    project_name: str
        The name of the project.
    user: str
        The username being granted admin rights.
    federated_zones: list
        Federated zones whose matching user also receives the rights.
    """
    proj_hash = utils.hash(project_name)
coll_user = "/%s/user/%s/%s" % (zone, proj_hash, user)
coll_project = "/%s/project/%s" % (zone, proj_hash)
acl_user = iRODSAccess("write", coll_user, user, zone)
acl_project = iRODSAccess("write", coll_project, user, zone)
session.permissions.set(acl_user)
session.permissions.set(acl_project)
for fed_zone in federated_zones:
username = user + "#" + fed_zone
acl_user = iRODSAccess("write", coll_user, user, fed_zone)
try:
session.permissions.set(acl_user)
except irods.exception.iRODSException as e:
logger.error("Error giving admin access to user directories for federated user {0}: {1}".format(username, e))
acl_project = iRODSAccess("write", coll_project, user, fed_zone)
try:
session.permissions.set(acl_project)
except irods.exception.iRODSException as e:
logger.error("Error giving admin access to user directories for federated user {0}: {1}".format(username, e))
def create_new_admin_user_directories(session, zone, project_name, user, federated_zones):
"""Create the necessary directories for a new admin user on the DDI. This includes setting the rights for the newly created directories
Parameters
----------
session : iRODS session object
        an object that sets up a session with iRODS
    zone: str
        iRODS zone to create the directory in
    project_name: str
        The name of the project.
    user: str
        The admin username the directory is created for.
    federated_zones: list
        Federated zones whose matching user also receives the rights.
    """
    proj_hash = utils.hash(project_name)
coll_user = "/%s/user/%s/%s" % (zone, proj_hash, user)
session.collections.create(coll_user)
acl_user = iRODSAccess("write", coll_user, user, zone)
session.permissions.set(acl_user)
for fed_zone in federated_zones:
username = user + "#" + fed_zone
acl_user = iRODSAccess("write", coll_user, user, fed_zone)
try:
session.permissions.set(acl_user)
except irods.exception.iRODSException as e:
logger.error("Error giving admin access to user directories for federated user {0}: {1}".format(username, e))
def freeze_dataset(session, zone, project_name, dataset, user):
"""Freeze a dataset in the public directory. This can be only done by a project admin user i.e a member of the project admin group
Parameters
----------
session : iRODS session object
        an object that sets up a session with iRODS
    zone: str
        iRODS zone the dataset lives in
project_name: str
The name of the project.
dataset: str
The name of the dataset to be frozen
user: str
The user that wants to freeze the dataset
"""
admin_group = session.user_groups.get(project_name+"_mgr")
members = admin_group.members
    proj_hash = utils.hash(project_name)
    # apply the ACL change once, and only if the caller is a project admin
    if any(a_user.name == user for a_user in members):
        coll_dataset = "/%s/public/%s/%s" % (zone, proj_hash, dataset)
        acl_dataset_admin = iRODSAccess("null", coll_dataset, admin_group, zone)
        acl_dataset = iRODSAccess("null", coll_dataset, project_name, zone)
        session.permissions.set(acl_dataset_admin)
        session.permissions.set(acl_dataset)
    else:
        logger.error("Operation is only supported for project admins")
| 37.482143
| 139
| 0.691758
|
512991571e5ec059474a5a8bec40c4f55d9f9c31
| 1,427
|
py
|
Python
|
nipype/interfaces/slicer/quantification/tests/test_auto_IntensityDifferenceMetric.py
|
dPys/nipype
|
75030b29297808e7c9a9e91b411b685154dff60b
|
[
"Apache-2.0"
] | 1
|
2019-03-25T14:11:18.000Z
|
2019-03-25T14:11:18.000Z
|
nipype/interfaces/slicer/quantification/tests/test_auto_IntensityDifferenceMetric.py
|
dPys/nipype
|
75030b29297808e7c9a9e91b411b685154dff60b
|
[
"Apache-2.0"
] | 1
|
2017-01-05T01:24:33.000Z
|
2017-01-05T01:24:33.000Z
|
nipype/interfaces/slicer/quantification/tests/test_auto_IntensityDifferenceMetric.py
|
wtriplett/nipype
|
388f140fceaf55438a987e9cdfa2a8e995428afd
|
[
"Apache-2.0"
] | 1
|
2020-12-16T16:36:48.000Z
|
2020-12-16T16:36:48.000Z
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ..changequantification import IntensityDifferenceMetric
def test_IntensityDifferenceMetric_inputs():
input_map = dict(
args=dict(argstr="%s",),
baselineSegmentationVolume=dict(argstr="%s", extensions=None, position=-3,),
baselineVolume=dict(argstr="%s", extensions=None, position=-4,),
changingBandSize=dict(argstr="--changingBandSize %d",),
environ=dict(nohash=True, usedefault=True,),
followupVolume=dict(argstr="%s", extensions=None, position=-2,),
outputVolume=dict(argstr="%s", hash_files=False, position=-1,),
reportFileName=dict(argstr="--reportFileName %s", hash_files=False,),
sensitivityThreshold=dict(argstr="--sensitivityThreshold %f",),
)
inputs = IntensityDifferenceMetric.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_IntensityDifferenceMetric_outputs():
output_map = dict(
outputVolume=dict(extensions=None, position=-1,),
reportFileName=dict(extensions=None,),
)
outputs = IntensityDifferenceMetric.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| 41.970588
| 84
| 0.685354
|
8cfea7362d17884efcd9a43ae71147313bd0f4d2
| 8,335
|
py
|
Python
|
docs/conf.py
|
takahi-i/pfm
|
224ca961ca43f50bd877789e2d8659ae838d517f
|
[
"MIT"
] | 9
|
2018-01-06T05:44:43.000Z
|
2020-06-24T00:15:16.000Z
|
docs/conf.py
|
takahi-i/pfm
|
224ca961ca43f50bd877789e2d8659ae838d517f
|
[
"MIT"
] | 27
|
2018-01-06T09:29:48.000Z
|
2020-04-10T16:11:59.000Z
|
docs/conf.py
|
takahi-i/pfm
|
224ca961ca43f50bd877789e2d8659ae838d517f
|
[
"MIT"
] | 1
|
2018-01-09T01:33:42.000Z
|
2018-01-09T01:33:42.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# pfm documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import pfm
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pfm'
copyright = u"2018, Takahiko Ito"
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = pfm.__version__
# The full version, including alpha/beta/rc tags.
release = pfm.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pfmdoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'pfm.tex',
u'pfm Documentation',
u'Takahiko Ito', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pfm',
u'pfm Documentation',
[u'Takahiko Ito'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'pfm',
u'pfm Documentation',
u'Takahiko Ito',
'pfm',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| 30.199275
| 76
| 0.713257
|
8b99af3e24c32727a883b6fa87c338e95be33771
| 3,343
|
py
|
Python
|
ibm-xforce-malware/operations.py
|
fortinet-fortisoar/connector-ibm-xforce-malware
|
624346dc580c4c97430e76e35d2ed4f2700b188e
|
[
"MIT"
] | null | null | null |
ibm-xforce-malware/operations.py
|
fortinet-fortisoar/connector-ibm-xforce-malware
|
624346dc580c4c97430e76e35d2ed4f2700b188e
|
[
"MIT"
] | null | null | null |
ibm-xforce-malware/operations.py
|
fortinet-fortisoar/connector-ibm-xforce-malware
|
624346dc580c4c97430e76e35d2ed4f2700b188e
|
[
"MIT"
] | 1
|
2021-12-14T13:08:26.000Z
|
2021-12-14T13:08:26.000Z
|
"""
Copyright start
Copyright (C) 2008 - 2021 Fortinet Inc.
All rights reserved.
FORTINET CONFIDENTIAL & FORTINET PROPRIETARY SOURCE CODE
Copyright end
"""
import base64
import requests
from connectors.core.connector import get_logger, ConnectorError
logger = get_logger('ibm-xforce-malware')
class IBMXMalware(object):
def __init__(self, config):
self.server_url = config.get('server_url')
if not self.server_url.startswith('https://'):
self.server_url = 'https://' + self.server_url
if not self.server_url.endswith('/'):
self.server_url += '/'
self.api_key = config.get('api_key')
self.api_password = config.get('api_password')
self.verify_ssl = config.get('verify_ssl')
def make_request(self, endpoint=None, method='GET', data=None, params=None, files=None):
try:
url = self.server_url + endpoint
b64_credential = base64.b64encode((self.api_key + ":" + self.api_password).encode('utf-8')).decode()
headers = {'Authorization': "Basic " + b64_credential, 'Content-Type': 'application/json'}
response = requests.request(method, url, params=params, files=files, data=data, headers=headers,
verify=self.verify_ssl)
if response.status_code == 200:
return response.json()
else:
logger.error(response.text)
raise ConnectorError({'status_code': response.status_code, 'message': response.reason})
except requests.exceptions.SSLError:
raise ConnectorError('SSL certificate validation failed')
except requests.exceptions.ConnectTimeout:
raise ConnectorError('The request timed out while trying to connect to the server')
except requests.exceptions.ReadTimeout:
raise ConnectorError('The server did not send any data in the allotted amount of time')
except requests.exceptions.ConnectionError:
raise ConnectorError('Invalid endpoint or credentials')
except Exception as err:
logger.exception(str(err))
raise ConnectorError(str(err))
def get_malware_for_file_hash(config, params):
malware = IBMXMalware(config)
endpoint = 'malware/' + str(params.get('filehash'))
return malware.make_request(endpoint=endpoint)
def get_malware_for_family(config, params):
malware = IBMXMalware(config)
endpoint = 'malware/family/' + str(params.get('family'))
return malware.make_request(endpoint=endpoint)
def wildcard_search_malware_family(config, params):
malware = IBMXMalware(config)
endpoint = 'malware/familyext/' + str(params.get('family'))
return malware.make_request(endpoint=endpoint)
def _check_health(config):
try:
params = {'filehash': '474B9CCF5AB9D72CA8A333889BBB34F0'}
res = get_malware_for_file_hash(config, params)
if res:
logger.info('connector available')
return True
except Exception as e:
logger.exception('{}'.format(e))
raise ConnectorError('{}'.format(e))
operations = {
'get_malware_for_file_hash': get_malware_for_file_hash,
'get_malware_for_family': get_malware_for_family,
'wildcard_search_malware_family': wildcard_search_malware_family
}
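# Hedged usage sketch (assumption: the credentials are placeholders and the
# hash is the same sample _check_health uses; real runs receive config/params
# from the FortiSOAR runtime).
if __name__ == '__main__':
    demo_config = {
        'server_url': 'api.xforce.ibmcloud.com',
        'api_key': '<api-key>',
        'api_password': '<api-password>',
        'verify_ssl': True,
    }
    demo_params = {'filehash': '474B9CCF5AB9D72CA8A333889BBB34F0'}
    print(operations['get_malware_for_file_hash'](demo_config, demo_params))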
| 37.561798
| 112
| 0.671552
|
acd56a1c3efbfccca4c967d6bf09785bcba86178
| 775
|
py
|
Python
|
test/setup/models.py
|
JangasCodingplace/flask-neo4j-lite
|
bcbb1a11c406d36e5d51f9ee3f329e7e6755b16c
|
[
"MIT"
] | null | null | null |
test/setup/models.py
|
JangasCodingplace/flask-neo4j-lite
|
bcbb1a11c406d36e5d51f9ee3f329e7e6755b16c
|
[
"MIT"
] | null | null | null |
test/setup/models.py
|
JangasCodingplace/flask-neo4j-lite
|
bcbb1a11c406d36e5d51f9ee3f329e7e6755b16c
|
[
"MIT"
] | null | null | null |
from flask_neo4j_lite.config import NeoConfig
from py2neo import Graph, NodeMatcher, RelationshipMatcher
NeoConfig.graph = Graph(password="neo4JPassword", port="11005")
NeoConfig.matcher = NodeMatcher(NeoConfig.graph)
NeoConfig.relationship_matcher = RelationshipMatcher(NeoConfig.graph)
from py2neo.ogm import (Property, RelatedTo, RelatedFrom)
from flask_neo4j_lite.manager import Neo4JManager
from flask_neo4j_lite.config import models
class Movie(Neo4JManager):
__primarykey__ = "title"
title = Property()
tagline = Property()
actors = RelatedFrom("Person", "ACTED_IN")
class Person(Neo4JManager):
__primarykey__ = "name"
name = Property()
acted_in = RelatedTo(Movie, "ACTED_IN")
models['Movie'] = Movie
models['Person'] = Person
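# Hedged usage sketch (assumption: Neo4JManager keeps py2neo's OGM semantics,
# so instances can be pushed through the shared graph handle; the titles and
# names below are illustrative).
if __name__ == "__main__":
    movie = Movie()
    movie.title = "The Matrix"
    movie.tagline = "Welcome to the Real World"
    actor = Person()
    actor.name = "Keanu Reeves"
    actor.acted_in.add(movie)
    NeoConfig.graph.push(actor)  # persists both nodes plus the ACTED_IN edge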
| 23.484848
| 69
| 0.762581
|
af417509e6fb813207fba22d28f8118c64d4226c
| 1,471
|
py
|
Python
|
mayan/apps/dynamic_search/settings.py
|
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
[
"Apache-2.0"
] | 2
|
2021-09-12T19:41:19.000Z
|
2021-09-12T19:41:20.000Z
|
mayan/apps/dynamic_search/settings.py
|
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
[
"Apache-2.0"
] | 37
|
2021-09-13T01:00:12.000Z
|
2021-10-02T03:54:30.000Z
|
mayan/apps/dynamic_search/settings.py
|
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
|
0e4e919fd2e1ded6711354a0330135283e87f8c7
|
[
"Apache-2.0"
] | 1
|
2021-09-22T13:17:30.000Z
|
2021-09-22T13:17:30.000Z
|
from django.utils.translation import ugettext_lazy as _
from mayan.apps.smart_settings.classes import SettingNamespace
from .literals import (
DEFAULT_SEARCH_BACKEND, DEFAULT_SEARCH_BACKEND_ARGUMENTS,
DEFAULT_SEARCH_DISABLE_SIMPLE_SEARCH,
DEFAULT_SEARCH_MATCH_ALL_DEFAULT_VALUE, DEFAULT_SEARCH_RESULTS_LIMIT
)
namespace = SettingNamespace(label=_('Search'), name='search')
setting_backend = namespace.add_setting(
default=DEFAULT_SEARCH_BACKEND, global_name='SEARCH_BACKEND',
help_text=_(
'Full path to the backend to be used to handle the search.'
)
)
setting_backend_arguments = namespace.add_setting(
default=DEFAULT_SEARCH_BACKEND_ARGUMENTS,
global_name='SEARCH_BACKEND_ARGUMENTS'
)
setting_disable_simple_search = namespace.add_setting(
default=DEFAULT_SEARCH_DISABLE_SIMPLE_SEARCH,
global_name='SEARCH_DISABLE_SIMPLE_SEARCH', help_text=_(
'Disables the single term bar search leaving only the advanced '
'search button.'
)
)
setting_match_all_default_value = namespace.add_setting(
global_name='SEARCH_MATCH_ALL_DEFAULT_VALUE',
default=DEFAULT_SEARCH_MATCH_ALL_DEFAULT_VALUE,
help_text=_('Sets the default state of the "Match all" checkbox.')
)
setting_results_limit = namespace.add_setting(
default=DEFAULT_SEARCH_RESULTS_LIMIT, global_name='SEARCH_RESULTS_LIMIT',
    help_text=_('Maximum number of search results to fetch and display.')
)
| 37.717949
| 78
| 0.774983
|
14c4843cceb2fc5fdfbade2e4a6aca56ba46349c
| 196
|
py
|
Python
|
tests/test_shapely.py
|
paultimothymooney/docker-python-2
|
15c3f923e2ccedb46e81bcb8177a67fc42d90bcc
|
[
"Apache-2.0"
] | 2,030
|
2015-04-14T15:44:41.000Z
|
2022-03-29T18:06:53.000Z
|
tests/test_shapely.py
|
paultimothymooney/docker-python-2
|
15c3f923e2ccedb46e81bcb8177a67fc42d90bcc
|
[
"Apache-2.0"
] | 666
|
2015-04-14T20:14:30.000Z
|
2022-03-29T12:59:21.000Z
|
tests/test_shapely.py
|
paultimothymooney/docker-python-2
|
15c3f923e2ccedb46e81bcb8177a67fc42d90bcc
|
[
"Apache-2.0"
] | 951
|
2015-05-07T18:26:08.000Z
|
2022-03-27T08:46:05.000Z
|
import unittest
from shapely.geometry import Point
class TestShapely(unittest.TestCase):
def test_geometry(self):
p = Point(0.0, 0.0)
self.assertEqual("Point", p.geom_type)
| 19.6
| 46
| 0.693878
|
0ab40f9c3104d460814f69b226450d3ef399b1d8
| 1,462
|
py
|
Python
|
addressapp/api/geography.py
|
AbhiyantrikTechnology/DentalHub-Backend
|
89802b3e7671ffe8b3d287a998c3c4f375b58f03
|
[
"MIT"
] | 1
|
2021-04-03T19:57:32.000Z
|
2021-04-03T19:57:32.000Z
|
addressapp/api/geography.py
|
AbhiyantrikTechnology/DentalHub-Backend
|
89802b3e7671ffe8b3d287a998c3c4f375b58f03
|
[
"MIT"
] | null | null | null |
addressapp/api/geography.py
|
AbhiyantrikTechnology/DentalHub-Backend
|
89802b3e7671ffe8b3d287a998c3c4f375b58f03
|
[
"MIT"
] | null | null | null |
import re
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import permissions
from userapp.models import User,CustomUser
from addressapp.serializers.address import GeoSerializer
from addressapp.models import Ward
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import filters
from addressapp.serializers.address import WardSerializer
class IsPostOrIsAuthenticated(permissions.BasePermission):
def has_permission(self, request, view):
# if request.method == 'GET':
# return True
return request.user and request.user.is_authenticated
class GeographyListView(APIView):
permission_classes = (IsPostOrIsAuthenticated,)
serializer_class = WardSerializer
    def get(self, request, format=None):
        if User.objects.filter(id=request.user.id, admin=True).exists():
            geography_obj = Ward.objects.filter(status=True)
            serializer = WardSerializer(geography_obj, many=True,
                                        context={'request': request})
            return Response(serializer.data)
        elif User.objects.filter(id=request.user.id).exists():
            geography_obj = Ward.objects.filter(customuser=request.user, status=True)
            serializer = WardSerializer(geography_obj, many=True,
                                        context={'request': request})
            return Response(serializer.data)
        # neither admin nor known user: make the failure explicit
        return Response(status=status.HTTP_403_FORBIDDEN)
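# Hedged routing sketch (assumption: the prefix and name are illustrative; the
# real project wires this view in its own urls.py), left commented here:
# from django.urls import path
# urlpatterns = [
#     path('geography/', GeographyListView.as_view(), name='geography-list'),
# ]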
| 36.55
| 84
| 0.73461
|
36dcd2c594fd14ca2c1ffdd145b7da97fa327b57
| 10,155
|
py
|
Python
|
tests/test_builtins.py
|
drmikecrowe/xonsh
|
8bbefebef30f2a75a695aa6fc5ed23b14db8f842
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
tests/test_builtins.py
|
drmikecrowe/xonsh
|
8bbefebef30f2a75a695aa6fc5ed23b14db8f842
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
tests/test_builtins.py
|
drmikecrowe/xonsh
|
8bbefebef30f2a75a695aa6fc5ed23b14db8f842
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Tests the xonsh builtins."""
from __future__ import unicode_literals, print_function
import os
import re
import builtins
import types
from ast import AST, Module, Interactive, Expression
from subprocess import Popen
import pytest
from xonsh import built_ins
from xonsh.built_ins import (
reglob,
pathsearch,
helper,
superhelper,
ensure_list_of_strs,
list_of_strs_or_callables,
list_of_list_of_strs_outer_product,
regexsearch,
globsearch,
expand_path,
convert_macro_arg,
in_macro_call,
call_macro,
enter_macro,
cmds_to_specs,
)
from xonsh.environ import Env
from xonsh.proc import PopenThread, ProcProxy, ProcProxyThread
from tools import skip_if_on_windows
HOME_PATH = os.path.expanduser("~")
@pytest.fixture(autouse=True)
def xonsh_execer_autouse(xonsh_execer):
return xonsh_execer
@pytest.mark.parametrize("testfile", reglob("test_.*"))
def test_reglob_tests(testfile):
assert testfile.startswith("test_")
@pytest.fixture
def home_env(xonsh_builtins):
"""Set `__xonsh__.env ` to a new Env instance on `xonsh_builtins`"""
xonsh_builtins.__xonsh__.env = Env(HOME=HOME_PATH)
return xonsh_builtins
@skip_if_on_windows
def test_repath_backslash(home_env):
exp = os.listdir(HOME_PATH)
exp = {p for p in exp if re.match(r"\w\w.*", p)}
exp = {os.path.join(HOME_PATH, p) for p in exp}
obs = set(pathsearch(regexsearch, r"~/\w\w.*"))
assert exp == obs
@skip_if_on_windows
def test_repath_HOME_PATH_itself(home_env):
exp = HOME_PATH
obs = pathsearch(regexsearch, "~")
assert 1 == len(obs)
assert exp == obs[0]
@skip_if_on_windows
def test_repath_HOME_PATH_contents(home_env):
exp = os.listdir(HOME_PATH)
exp = {os.path.join(HOME_PATH, p) for p in exp}
obs = set(pathsearch(regexsearch, "~/.*"))
assert exp == obs
@skip_if_on_windows
def test_repath_HOME_PATH_var(home_env):
exp = HOME_PATH
obs = pathsearch(regexsearch, "$HOME")
assert 1 == len(obs)
assert exp == obs[0]
@skip_if_on_windows
def test_repath_HOME_PATH_var_brace(home_env):
exp = HOME_PATH
obs = pathsearch(regexsearch, '${"HOME"}')
assert 1 == len(obs)
assert exp == obs[0]
def test_helper_int(home_env):
helper(int, "int")
def test_helper_helper(home_env):
helper(helper, "helper")
def test_helper_env(home_env):
helper(Env, "Env")
def test_superhelper_int(home_env):
superhelper(int, "int")
def test_superhelper_helper(home_env):
superhelper(helper, "helper")
def test_superhelper_env(home_env):
superhelper(Env, "Env")
@pytest.mark.parametrize(
"exp, inp", [(["yo"], "yo"), (["yo"], ["yo"]), (["42"], 42), (["42"], [42])]
)
def test_ensure_list_of_strs(exp, inp):
obs = ensure_list_of_strs(inp)
assert exp == obs
f = lambda x: 20
@pytest.mark.parametrize(
"exp, inp",
[
(["yo"], "yo"),
(["yo"], ["yo"]),
(["42"], 42),
(["42"], [42]),
([f], f),
([f], [f]),
],
)
def test_list_of_strs_or_callables(exp, inp):
obs = list_of_strs_or_callables(inp)
assert exp == obs
@pytest.mark.parametrize(
"inp, exp",
[
(["x", ["y", "z"]], ["xy", "xz"]),
(["x", ["y", "z"], ["a"]], ["xya", "xza"]),
([["y", "z"], ["a", "b"]], ["ya", "yb", "za", "zb"]),
],
)
def test_list_of_list_of_strs_outer_product(xonsh_builtins, inp, exp):
obs = list_of_list_of_strs_outer_product(inp)
assert exp == obs
@pytest.mark.parametrize(
"s",
[
"~",
"~/",
"x=~/place",
"x=one:~/place",
"x=one:~/place:~/yo",
"x=~/one:~/place:~/yo",
],
)
def test_expand_path(s, home_env):
if os.sep != "/":
s = s.replace("/", os.sep)
if os.pathsep != ":":
s = s.replace(":", os.pathsep)
assert expand_path(s) == s.replace("~", HOME_PATH)
@pytest.mark.parametrize("kind", [str, "s", "S", "str", "string"])
def test_convert_macro_arg_str(kind):
raw_arg = "value"
arg = convert_macro_arg(raw_arg, kind, None, None)
assert arg is raw_arg
@pytest.mark.parametrize("kind", [AST, "a", "Ast"])
def test_convert_macro_arg_ast(kind):
raw_arg = "42"
arg = convert_macro_arg(raw_arg, kind, {}, None)
assert isinstance(arg, AST)
@pytest.mark.parametrize("kind", [types.CodeType, compile, "c", "code", "compile"])
def test_convert_macro_arg_code(kind):
raw_arg = "42"
arg = convert_macro_arg(raw_arg, kind, {}, None)
assert isinstance(arg, types.CodeType)
@pytest.mark.parametrize("kind", [eval, None, "v", "eval"])
def test_convert_macro_arg_eval(kind):
# literals
raw_arg = "42"
arg = convert_macro_arg(raw_arg, kind, {}, None)
assert arg == 42
# exprs
raw_arg = "x + 41"
arg = convert_macro_arg(raw_arg, kind, {}, {"x": 1})
assert arg == 42
@pytest.mark.parametrize("kind", [exec, "x", "exec"])
def test_convert_macro_arg_exec(kind):
# at global scope
raw_arg = "def f(x, y):\n return x + y"
glbs = {}
arg = convert_macro_arg(raw_arg, kind, glbs, None)
assert arg is None
assert "f" in glbs
assert glbs["f"](1, 41) == 42
# at local scope
raw_arg = "def g(z):\n return x + z\ny += 42"
glbs = {"x": 40}
locs = {"y": 1}
arg = convert_macro_arg(raw_arg, kind, glbs, locs)
assert arg is None
assert "g" in locs
assert locs["g"](1) == 41
assert "y" in locs
assert locs["y"] == 43
@pytest.mark.parametrize("kind", [type, "t", "type"])
def test_convert_macro_arg_type(kind):  # renamed: a duplicate name would shadow the eval test above
# literals
raw_arg = "42"
arg = convert_macro_arg(raw_arg, kind, {}, None)
assert arg is int
# exprs
raw_arg = "x + 41"
arg = convert_macro_arg(raw_arg, kind, {}, {"x": 1})
assert arg is int
def test_in_macro_call():
def f():
pass
with in_macro_call(f, True, True):
assert f.macro_globals
assert f.macro_locals
assert not hasattr(f, "macro_globals")
assert not hasattr(f, "macro_locals")
@pytest.mark.parametrize("arg", ["x", "42", "x + y"])
def test_call_macro_str(arg):
def f(x: str):
return x
rtn = call_macro(f, [arg], None, None)
assert rtn is arg
@pytest.mark.parametrize("arg", ["x", "42", "x + y"])
def test_call_macro_ast(arg):
def f(x: AST):
return x
rtn = call_macro(f, [arg], {}, None)
assert isinstance(rtn, AST)
@pytest.mark.parametrize("arg", ["x", "42", "x + y"])
def test_call_macro_code(arg):
def f(x: compile):
return x
rtn = call_macro(f, [arg], {}, None)
assert isinstance(rtn, types.CodeType)
@pytest.mark.parametrize("arg", ["x", "42", "x + y"])
def test_call_macro_eval(arg):
def f(x: eval):
return x
rtn = call_macro(f, [arg], {"x": 42, "y": 0}, None)
assert rtn == 42
@pytest.mark.parametrize(
"arg", ["if y:\n pass", "if 42:\n pass", "if x + y:\n pass"]
)
def test_call_macro_exec(arg):
def f(x: exec):
return x
rtn = call_macro(f, [arg], {"x": 42, "y": 0}, None)
assert rtn is None
@pytest.mark.parametrize("arg", ["x", "42", "x + y"])
def test_call_macro_raw_arg(arg):
def f(x: str):
return x
rtn = call_macro(f, ["*", arg], {"x": 42, "y": 0}, None)
assert rtn == 42
@pytest.mark.parametrize("arg", ["x", "42", "x + y"])
def test_call_macro_raw_kwarg(arg):
def f(x: str):
return x
rtn = call_macro(f, ["*", "x=" + arg], {"x": 42, "y": 0}, None)
assert rtn == 42
@pytest.mark.parametrize("arg", ["x", "42", "x + y"])
def test_call_macro_raw_kwargs(arg):
def f(x: str):
return x
rtn = call_macro(f, ["*", '**{"x" :' + arg + "}"], {"x": 42, "y": 0}, None)
assert rtn == 42
def test_call_macro_ast_eval_expr():
def f(x: ("ast", "eval")):
return x
rtn = call_macro(f, ["x == 5"], {}, None)
assert isinstance(rtn, Expression)
def test_call_macro_ast_single_expr():
def f(x: ("ast", "single")):
return x
rtn = call_macro(f, ["x == 5"], {}, None)
assert isinstance(rtn, Interactive)
def test_call_macro_ast_exec_expr():
def f(x: ("ast", "exec")):
return x
rtn = call_macro(f, ["x == 5"], {}, None)
assert isinstance(rtn, Module)
def test_call_macro_ast_eval_statement():
def f(x: ("ast", "eval")):
return x
    # It doesn't make sense to pass a statement to something that
    # expects to be evaled, so this must raise a SyntaxError.
    with pytest.raises(SyntaxError):
        call_macro(f, ["x = 5"], {}, None)
def test_call_macro_ast_single_statement():
def f(x: ("ast", "single")):
return x
rtn = call_macro(f, ["x = 5"], {}, None)
assert isinstance(rtn, Interactive)
def test_call_macro_ast_exec_statement():
def f(x: ("ast", "exec")):
return x
rtn = call_macro(f, ["x = 5"], {}, None)
assert isinstance(rtn, Module)
def test_enter_macro():
obj = lambda: None
rtn = enter_macro(obj, "wakka", True, True)
assert obj is rtn
assert obj.macro_block == "wakka"
assert obj.macro_globals
assert obj.macro_locals
@skip_if_on_windows
def test_cmds_to_specs_thread_subproc(xonsh_builtins):
env = xonsh_builtins.__xonsh__.env
cmds = [["pwd"]]
# First check that threadable subprocs become threadable
env['THREAD_SUBPROCS'] = True
specs = cmds_to_specs(cmds, captured='hiddenobject')
assert specs[0].cls is PopenThread
# turn off threading and check we use Popen
env['THREAD_SUBPROCS'] = False
specs = cmds_to_specs(cmds, captured='hiddenobject')
assert specs[0].cls is Popen
# now check the threadbility of callable aliases
cmds = [[lambda: "Keras Selyrian"]]
# check that threadable alias become threadable
env['THREAD_SUBPROCS'] = True
specs = cmds_to_specs(cmds, captured='hiddenobject')
assert specs[0].cls is ProcProxyThread
# turn off threading and check we use ProcProxy
env['THREAD_SUBPROCS'] = False
specs = cmds_to_specs(cmds, captured='hiddenobject')
assert specs[0].cls is ProcProxy
| 24.236277
| 83
| 0.617725
|
904d838c694a787bc45f761033cd9853f157bdb6
| 498
|
py
|
Python
|
Mas/Email/errors.py
|
XtremeGood/DjantoSoft
|
aa6e89f7319d473d06117c2a5f56abcb40d4060a
|
[
"Apache-2.0"
] | null | null | null |
Mas/Email/errors.py
|
XtremeGood/DjantoSoft
|
aa6e89f7319d473d06117c2a5f56abcb40d4060a
|
[
"Apache-2.0"
] | null | null | null |
Mas/Email/errors.py
|
XtremeGood/DjantoSoft
|
aa6e89f7319d473d06117c2a5f56abcb40d4060a
|
[
"Apache-2.0"
] | null | null | null |
class SubjectError(Exception):
def __init__(self, expression, message=""):
self.expression = expression
self.message = f'Invalid subject {expression}' if not message else message
super().__init__(self.message)
class FileReadingError(Exception):
def __init__(self, expression, message="", file_name =""):
self.expression = expression
self.message = f'Failure reading file {file_name}' if not message else message
super().__init__(self.message)
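# Hedged usage sketch showing the default messages (the strings below are
# illustrative).
if __name__ == "__main__":
    try:
        raise SubjectError("Re:")
    except SubjectError as err:
        print(err)  # -> Invalid subject Re:
    try:
        raise FileReadingError("read failed", file_name="inbox.csv")
    except FileReadingError as err:
        print(err)  # -> Failure reading file inbox.csv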
| 41.5
| 86
| 0.694779
|
588539d075191df118a705abd9e7f78419142ffb
| 5,005
|
py
|
Python
|
pr-data/pr-data.py
|
thebombzen/azur-lane-calculators
|
29163c0356776f629019760d7840e4804373fcbd
|
[
"MIT"
] | 2
|
2019-12-10T12:52:43.000Z
|
2020-06-09T00:47:46.000Z
|
pr-data/pr-data.py
|
thebombzen/azur-lane
|
29163c0356776f629019760d7840e4804373fcbd
|
[
"MIT"
] | null | null | null |
pr-data/pr-data.py
|
thebombzen/azur-lane
|
29163c0356776f629019760d7840e4804373fcbd
|
[
"MIT"
] | null | null | null |
# python3 uwsgi
import cgi
import subprocess
import os
import re
import sys
import tempfile
import traceback
colors = {
'ok': '#0D800F',
'warn': '#C8B00F',
'error': '#B00D0F',
}
asset_cache = {}
static_asset_dir = 'pr-data/static/'
def eprint(*args, **kwargs):
kwargs['file'] = sys.stderr
print(*args, **kwargs)
# will later wrap python.logging
def logprint(*args, **kwargs):
eprint(*args, **kwargs)
def canonicalize_asset(asset):
return asset if asset.startswith(static_asset_dir) else static_asset_dir + asset
def canonicalassets(f):
def inner(asset):
return f(canonicalize_asset(asset))
return inner
@canonicalassets
def load_asset(asset):
logprint(f'Loading asset: {asset}')
mtime = os.path.getmtime(asset)
with open(asset, 'rb') as fd:
asset_cache[asset] = {
'mtime': mtime,
'data' : fd.read(),
}
return asset_cache[asset]['data']
@canonicalassets
def get_asset(asset):
if asset not in asset_cache:
return load_asset(asset)
mtime = os.path.getmtime(asset)
if asset_cache[asset]['mtime'] != mtime:
logprint(f'Asset changed on disk: {asset}')
return load_asset(asset)
return asset_cache[asset]['data']
def application(env, start_response):
try:
status, headers, lines = response_checker(env, start_response)
start_response(status, headers)
return lines
except Exception as ex:
traceback.print_exc()
status = '500 Internal Server Error'
start_response(status, [('Content-Type','text/html')])
return [get_asset('500.html')]
def response_checker(env, start_response):
status, lines = main(env, start_response)
if status == '200 OK':
return (status, [('Content-Type','text/html')], lines)
headers = []
if status == '302 Found' or status == '301 Moved Permanently':
headers += [('location', lines)]
return (status, headers, [])
headers += [('Content-Type','text/html')]
lines = []
if status == '400 Bad Request':
lines += [get_asset('400.html')]
elif status == '404 Not Found':
lines += [get_asset('404.html')]
elif status == '405 Method Not Allowed':
lines += [get_asset('405.html')]
else:
lines += [re.sub(r'\{STATUS\}', status, get_asset('error.html').decode()).encode()]
return (status, [('Content-Type','text/html')], lines)
def main(env, start_response):
if env['REQUEST_URI'] != '/azur-lane/pr-data/':
return ('404 Not Found', None)
if not is_post_request(env):
return ('200 OK', [get_asset('header.html'), get_asset('tail.html')])
form = get_post_form(env)
project_series = form.getvalue('project-series', '')
project_type = form.getvalue('project-type', '')
project_name = form.getvalue('project-name', '')
results_screenshot = form['results-screenshot']
if project_series == '' or project_name == '' or results_screenshot is None or results_screenshot.filename is None or results_screenshot.filename == '':
return ('400 Bad Request', None)
fd, tmp_name = tempfile.mkstemp(prefix='pr-data-', dir='/tmp/')
tmp = os.fdopen(fd, mode='w+b')
    tmp.write(results_screenshot.file.read())
    tmp.close()  # flush to disk before the helper script reads the file
    status = subprocess.run(['pr-data/image-uploaded.sh', tmp_name, results_screenshot.filename, project_series, project_type, project_name], capture_output=True).stdout.decode()
success = get_asset('success.html').decode()
status_dict = {}
for line in status.splitlines():
match = re.match(r'(\w+):\s(.*)', line)
        if not match:
            logprint(line)
            continue  # skip lines that do not look like "key: value"
        k, v = match.group(1, 2)
status_dict[k] = v
success = success.replace('COLOR', colors[status_dict['color']], 1)
success = success.replace('STATUS', status_dict['status'], 1)
success = success.replace('EXTRA', status_dict['extra'], 1)
return ('200 OK', [
get_asset('header.html'),
success.encode(),
get_asset('tail.html'),
    ])
def is_post_request(environ):
if environ['REQUEST_METHOD'].upper() != 'POST':
return False
return environ.get('CONTENT_TYPE', '').startswith('multipart/form-data')
def get_post_form(environ):
input = environ['wsgi.input']
post_form = environ.get('wsgi.post_form')
if (post_form is not None
and post_form[0] is input):
return post_form[2]
# This must be done to avoid a bug in cgi.FieldStorage
environ.setdefault('QUERY_STRING', '')
fs = cgi.FieldStorage(fp=environ['wsgi.input'],
environ=environ,
keep_blank_values=1)
new_input = InputProcessed()
post_form = (new_input, input, fs)
environ['wsgi.post_form'] = post_form
environ['wsgi.input'] = new_input
return fs
class InputProcessed(object):
def read(self, *args):
raise EOFError('The wsgi.input stream has already been consumed')
readline = readlines = __iter__ = read
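# Hedged local-dev sketch (assumption: a smoke test outside uwsgi; the bind
# address and port are arbitrary, and REQUEST_URI is injected because wsgiref,
# unlike uwsgi/nginx, does not set it).
if __name__ == '__main__':
    from wsgiref.simple_server import make_server
    def dev_app(env, start_response):
        env.setdefault('REQUEST_URI', env.get('PATH_INFO', ''))
        return application(env, start_response)
    make_server('127.0.0.1', 8080, dev_app).serve_forever()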
| 34.047619
| 178
| 0.635564
|
ff689ac7e4d06f135b1726c646f1d665880af8ce
| 2,859
|
py
|
Python
|
tests/numpy_unit_testing/test_function_binary_operator_right_shift.py
|
jiajiaxu123/Orca
|
e86189e70c1d0387816bb98b8047a6232fbda9df
|
[
"Apache-2.0"
] | 20
|
2019-12-02T11:49:12.000Z
|
2021-12-24T19:34:32.000Z
|
tests/numpy_unit_testing/test_function_binary_operator_right_shift.py
|
jiajiaxu123/Orca
|
e86189e70c1d0387816bb98b8047a6232fbda9df
|
[
"Apache-2.0"
] | null | null | null |
tests/numpy_unit_testing/test_function_binary_operator_right_shift.py
|
jiajiaxu123/Orca
|
e86189e70c1d0387816bb98b8047a6232fbda9df
|
[
"Apache-2.0"
] | 5
|
2019-12-02T12:16:22.000Z
|
2021-10-22T02:27:47.000Z
|
import unittest
from setup.settings import *
from numpy.testing import *
from pandas.util.testing import *
import numpy as np
import dolphindb_numpy as dnp
import pandas as pd
import orca
class FunctionrightshiftTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
# connect to a DolphinDB server
orca.connect(HOST, PORT, "admin", "123456")
def test_function_math_binary_right_shift_scalar(self):
self.assertEqual(dnp.right_shift(1, 4), np.right_shift(1, 4))
self.assertEqual(dnp.right_shift(1, -5), np.right_shift(1, -5))
self.assertEqual(dnp.right_shift(0, 9), np.right_shift(0, 9))
def test_function_math_binary_right_shift_list(self):
lst1 = [1, 2, 3]
lst2 = [4, 6, 9]
assert_array_equal(dnp.right_shift(lst1, lst2), np.right_shift(lst1, lst2))
def test_function_math_binary_right_shift_array_with_scalar(self):
npa = np.array([1, 2, 3])
dnpa = dnp.array([1, 2, 3])
assert_array_equal(dnp.right_shift(dnpa, 1), np.right_shift(npa, 1))
assert_array_equal(dnp.right_shift(1, dnpa), np.right_shift(1, npa))
def test_function_math_binary_right_shift_array_with_array(self):
npa1 = np.array([1, 2, 3])
npa2 = np.array([4, 6, 9])
dnpa1 = dnp.array([1, 2, 3])
dnpa2 = dnp.array([4, 6, 9])
assert_array_equal(dnp.right_shift(dnpa1, dnpa2), np.right_shift(npa1, npa2))
def test_function_math_binary_right_shift_array_with_array_param_out(self):
npa1 = np.array([1, 2, 3])
npa2 = np.array([4, 6, 9])
npa = np.zeros(shape=(1, 3))
dnpa1 = dnp.array([1, 2, 3])
dnpa2 = dnp.array([4, 6, 9])
dnpa = dnp.zeros(shape=(1, 3))
np.right_shift(npa1, npa2, out=npa)
dnp.right_shift(dnpa1, dnpa2, out=dnpa)
assert_array_equal(dnpa, npa)
def test_function_math_binary_right_shift_array_with_series(self):
npa = np.array([1, 2, 3])
dnpa = dnp.array([1, 2, 3])
ps = pd.Series([4, 6, 9])
os = orca.Series([4, 6, 9])
assert_series_equal(dnp.right_shift(dnpa, os).to_pandas(), np.right_shift(npa, ps))
assert_series_equal(dnp.right_shift(os, dnpa).to_pandas(), np.right_shift(ps, npa))
pser = pd.Series([1, 2, 3])
oser = orca.Series([1, 2, 3])
assert_series_equal(dnp.right_shift(os, oser).to_pandas(), np.right_shift(ps, pser))
def test_function_math_binary_right_shift_array_with_dataframe(self):
npa = np.array([1, 2, 3])
dnpa = dnp.array([1, 2, 3])
pdf = pd.DataFrame({'A': [4, 6, 9]})
odf = orca.DataFrame({'A': [4, 6, 9]})
# TODO: orca right_shift bug
# assert_frame_equal(odf.right_shift(dnpa, axis=0).to_pandas(), pdf.right_shift(npa, axis=0))
if __name__ == '__main__':
unittest.main()
| 37.12987
| 101
| 0.639035
|
0c7a4531b8f9ff7d97e599fdd21df102f4bc4189
| 2,624
|
py
|
Python
|
geostore/check_stac_metadata/task.py
|
adisbladis/geostore
|
79439c06b33414e1e26b3aa4b93a72fd7cbbae83
|
[
"MIT"
] | 25
|
2021-05-19T08:05:07.000Z
|
2022-03-14T02:48:58.000Z
|
geostore/check_stac_metadata/task.py
|
adisbladis/geostore
|
79439c06b33414e1e26b3aa4b93a72fd7cbbae83
|
[
"MIT"
] | 311
|
2021-05-17T23:04:56.000Z
|
2022-03-31T10:41:44.000Z
|
geostore/check_stac_metadata/task.py
|
adisbladis/geostore
|
79439c06b33414e1e26b3aa4b93a72fd7cbbae83
|
[
"MIT"
] | 1
|
2022-01-03T05:38:32.000Z
|
2022-01-03T05:38:32.000Z
|
from logging import Logger
from botocore.exceptions import ClientError
from botocore.response import StreamingBody
from jsonschema import ValidationError, validate
from linz_logger import get_log
from ..api_keys import SUCCESS_KEY
from ..error_response_keys import ERROR_MESSAGE_KEY
from ..logging_keys import (
LOG_MESSAGE_LAMBDA_FAILURE,
LOG_MESSAGE_LAMBDA_START,
LOG_MESSAGE_VALIDATION_COMPLETE,
)
from ..parameter_store import ParameterName, get_param
from ..s3 import get_s3_client_for_role
from ..s3_utils import get_bucket_and_key_from_url
from ..step_function import Outcome, get_hash_key
from ..step_function_keys import DATASET_ID_KEY, METADATA_URL_KEY, S3_ROLE_ARN_KEY, VERSION_ID_KEY
from ..types import JsonObject
from ..validation_results_model import ValidationResultFactory
from .utils import STACDatasetValidator
LOGGER: Logger = get_log()
def lambda_handler(event: JsonObject, _context: bytes) -> JsonObject:
LOGGER.debug(LOG_MESSAGE_LAMBDA_START, extra={"lambda_input": event})
# validate input
try:
validate(
event,
{
"type": "object",
"properties": {
DATASET_ID_KEY: {"type": "string"},
VERSION_ID_KEY: {"type": "string"},
METADATA_URL_KEY: {"type": "string"},
S3_ROLE_ARN_KEY: {"type": "string"},
},
"required": [DATASET_ID_KEY, METADATA_URL_KEY, S3_ROLE_ARN_KEY, VERSION_ID_KEY],
"additionalProperties": True,
},
)
except ValidationError as error:
LOGGER.warning(
LOG_MESSAGE_VALIDATION_COMPLETE, extra={"outcome": Outcome.FAILED, "error": error}
)
return {ERROR_MESSAGE_KEY: error.message}
try:
s3_client = get_s3_client_for_role(event[S3_ROLE_ARN_KEY])
except ClientError as error:
LOGGER.warning(LOG_MESSAGE_LAMBDA_FAILURE, extra={"error": error})
return {ERROR_MESSAGE_KEY: str(error)}
def s3_url_reader(url: str) -> StreamingBody:
bucket_name, key = get_bucket_and_key_from_url(url)
response = s3_client.get_object(Bucket=bucket_name, Key=key)
return response["Body"]
hash_key = get_hash_key(event[DATASET_ID_KEY], event[VERSION_ID_KEY])
validation_result_factory = ValidationResultFactory(
hash_key, get_param(ParameterName.STORAGE_VALIDATION_RESULTS_TABLE_NAME)
)
validator = STACDatasetValidator(hash_key, s3_url_reader, validation_result_factory)
validator.run(event[METADATA_URL_KEY])
return {SUCCESS_KEY: True}
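# Usage sketch (illustrative; the key names are the imported constants and the
# values below are placeholders, not real resources):
#   event = {
#       DATASET_ID_KEY: "dataset-123",
#       VERSION_ID_KEY: "1",
#       METADATA_URL_KEY: "s3://my-bucket/collection.json",
#       S3_ROLE_ARN_KEY: "arn:aws:iam::123456789012:role/read-role",
#   }
#   lambda_handler(event, b"")  # -> {SUCCESS_KEY: True} on success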
| 36.444444
| 98
| 0.703506
|
2d3b1c1f3cdb8ebe90410027bc4df4ff9aadba9f
| 41,376
|
py
|
Python
|
Plugins/UnrealEnginePython/Binaries/Win64/Lib/site-packages/tensorflow/python/debug/cli/debugger_cli_common.py
|
JustinACoder/H22-GR3-UnrealAI
|
361eb9ef1147f8a2991e5f98c4118cd823184adf
|
[
"MIT"
] | 6
|
2022-02-04T18:12:24.000Z
|
2022-03-21T23:57:12.000Z
|
Lib/site-packages/tensorflow/python/debug/cli/debugger_cli_common.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/tensorflow/python/debug/cli/debugger_cli_common.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | 1
|
2022-02-08T03:53:23.000Z
|
2022-02-08T03:53:23.000Z
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Building Blocks of TensorFlow Debugger Command-Line Interface."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import os
import re
import sre_constants
import traceback
import numpy as np
import six
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python import pywrap_tensorflow_internal
from tensorflow.python.platform import gfile
HELP_INDENT = " "
EXPLICIT_USER_EXIT = "explicit_user_exit"
REGEX_MATCH_LINES_KEY = "regex_match_lines"
INIT_SCROLL_POS_KEY = "init_scroll_pos"
MAIN_MENU_KEY = "mm:"
class CommandLineExit(Exception):
def __init__(self, exit_token=None):
Exception.__init__(self)
self._exit_token = exit_token
@property
def exit_token(self):
return self._exit_token
class RichLine(object):
"""Rich single-line text.
Attributes:
text: A plain string, the raw text represented by this object. Should not
contain newlines.
font_attr_segs: A list of (start, end, font attribute) triples, representing
richness information applied to substrings of text.
"""
def __init__(self, text="", font_attr=None):
"""Construct a RichLine with no rich attributes or a single attribute.
Args:
text: Raw text string
font_attr: If specified, a single font attribute to be applied to the
entire text. Extending this object via concatenation allows creation
of text with varying attributes.
"""
# TODO(ebreck) Make .text and .font_attr protected members when we no
# longer need public access.
self.text = text
if font_attr:
self.font_attr_segs = [(0, len(text), font_attr)]
else:
self.font_attr_segs = []
def __add__(self, other):
"""Concatenate two chunks of maybe rich text to make a longer rich line.
Does not modify self.
Args:
other: Another piece of text to concatenate with this one.
If it is a plain str, it will be appended to this string with no
attributes. If it is a RichLine, it will be appended to this string
with its attributes preserved.
Returns:
A new RichLine comprising both chunks of text, with appropriate
attributes applied to the corresponding substrings.
"""
ret = RichLine()
if isinstance(other, six.string_types):
ret.text = self.text + other
ret.font_attr_segs = self.font_attr_segs[:]
return ret
elif isinstance(other, RichLine):
ret.text = self.text + other.text
ret.font_attr_segs = self.font_attr_segs[:]
old_len = len(self.text)
for start, end, font_attr in other.font_attr_segs:
ret.font_attr_segs.append((old_len + start, old_len + end, font_attr))
return ret
else:
raise TypeError("%r cannot be concatenated with a RichLine" % other)
def __len__(self):
return len(self.text)
def rich_text_lines_from_rich_line_list(rich_text_list, annotations=None):
"""Convert a list of RichLine objects or strings to a RichTextLines object.
Args:
rich_text_list: a list of RichLine objects or strings
    annotations: annotations for the resultant RichTextLines object.
Returns:
A corresponding RichTextLines object.
"""
lines = []
font_attr_segs = {}
for i, rl in enumerate(rich_text_list):
if isinstance(rl, RichLine):
lines.append(rl.text)
if rl.font_attr_segs:
font_attr_segs[i] = rl.font_attr_segs
else:
lines.append(rl)
return RichTextLines(lines, font_attr_segs, annotations=annotations)
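# Usage sketch (illustrative, not part of the original module): concatenating
# RichLine objects tracks font-attribute offsets automatically.
#   line = RichLine("Hello, ") + RichLine("world", font_attr="bold")
#   doc = rich_text_lines_from_rich_line_list([line, "plain second line"])
#   doc.lines          -> ["Hello, world", "plain second line"]
#   doc.font_attr_segs -> {0: [(7, 12, "bold")]}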
def get_tensorflow_version_lines(include_dependency_versions=False):
"""Generate RichTextLines with TensorFlow version info.
Args:
include_dependency_versions: Include the version of TensorFlow's key
dependencies, such as numpy.
Returns:
A formatted, multi-line `RichTextLines` object.
"""
lines = ["TensorFlow version: %s" % pywrap_tensorflow_internal.__version__]
lines.append("")
if include_dependency_versions:
lines.append("Dependency version(s):")
lines.append(" numpy: %s" % np.__version__)
lines.append("")
return RichTextLines(lines)
class RichTextLines(object):
"""Rich multi-line text.
Line-by-line text output, with font attributes (e.g., color) and annotations
(e.g., indices in a multi-dimensional tensor). Used as the text output of CLI
commands. Can be rendered on terminal environments such as curses.
This is not to be confused with Rich Text Format (RTF). This class is for text
lines only.
"""
def __init__(self, lines, font_attr_segs=None, annotations=None):
"""Constructor of RichTextLines.
Args:
lines: A list of str or a single str, representing text output to
screen. The latter case is for convenience when the text output is
single-line.
font_attr_segs: A map from 0-based row index to a list of 3-tuples.
It lists segments in each row that have special font attributes, such
as colors, that are not the default attribute. For example:
{1: [(0, 3, "red"), (4, 7, "green")], 2: [(10, 20, "yellow")]}
In each tuple, the 1st element is the start index of the segment. The
2nd element is the end index, in an "open interval" fashion. The 3rd
element is an object or a list of objects that represents the font
attribute. Colors are represented as strings as in the examples above.
annotations: A map from 0-based row index to any object for annotating
the row. A typical use example is annotating rows of the output as
indices in a multi-dimensional tensor. For example, consider the
following text representation of a 3x2x2 tensor:
[[[0, 0], [0, 0]],
[[0, 0], [0, 0]],
[[0, 0], [0, 0]]]
The annotation can indicate the indices of the first element shown in
each row, i.e.,
{0: [0, 0, 0], 1: [1, 0, 0], 2: [2, 0, 0]}
This information can make display of tensors on screen clearer and can
help the user navigate (scroll) to the desired location in a large
tensor.
Raises:
ValueError: If lines is of invalid type.
"""
if isinstance(lines, list):
self._lines = lines
elif isinstance(lines, six.string_types):
self._lines = [lines]
else:
raise ValueError("Unexpected type in lines: %s" % type(lines))
self._font_attr_segs = font_attr_segs
if not self._font_attr_segs:
self._font_attr_segs = {}
# TODO(cais): Refactor to collections.defaultdict(list) to simplify code.
self._annotations = annotations
if not self._annotations:
self._annotations = {}
# TODO(cais): Refactor to collections.defaultdict(list) to simplify code.
@property
def lines(self):
return self._lines
@property
def font_attr_segs(self):
return self._font_attr_segs
@property
def annotations(self):
return self._annotations
def num_lines(self):
return len(self._lines)
def slice(self, begin, end):
"""Slice a RichTextLines object.
The object itself is not changed. A sliced instance is returned.
Args:
begin: (int) Beginning line index (inclusive). Must be >= 0.
end: (int) Ending line index (exclusive). Must be >= 0.
Returns:
(RichTextLines) Sliced output instance of RichTextLines.
Raises:
ValueError: If begin or end is negative.
"""
if begin < 0 or end < 0:
raise ValueError("Encountered negative index.")
# Copy lines.
lines = self.lines[begin:end]
# Slice font attribute segments.
font_attr_segs = {}
for key in self.font_attr_segs:
if key >= begin and key < end:
font_attr_segs[key - begin] = self.font_attr_segs[key]
# Slice annotations.
annotations = {}
for key in self.annotations:
if not isinstance(key, int):
# Annotations can contain keys that are not line numbers.
annotations[key] = self.annotations[key]
elif key >= begin and key < end:
annotations[key - begin] = self.annotations[key]
return RichTextLines(
lines, font_attr_segs=font_attr_segs, annotations=annotations)
def extend(self, other):
"""Extend this instance of RichTextLines with another instance.
The extension takes effect on the text lines, the font attribute segments,
as well as the annotations. The line indices in the font attribute
segments and the annotations are adjusted to account for the existing
lines. If there are duplicate, non-line-index fields in the annotations,
the value from the input argument "other" will override that in this
instance.
Args:
other: (RichTextLines) The other RichTextLines instance to be appended at
the end of this instance.
"""
orig_num_lines = self.num_lines() # Record original number of lines.
# Merge the lines.
self._lines.extend(other.lines)
# Merge the font_attr_segs.
for line_index in other.font_attr_segs:
self._font_attr_segs[orig_num_lines + line_index] = (
other.font_attr_segs[line_index])
# Merge the annotations.
for key in other.annotations:
if isinstance(key, int):
self._annotations[orig_num_lines + key] = (other.annotations[key])
else:
self._annotations[key] = other.annotations[key]
def _extend_before(self, other):
"""Add another RichTextLines object to the front.
Args:
other: (RichTextLines) The other object to add to the front to this
object.
"""
other_num_lines = other.num_lines() # Record original number of lines.
# Merge the lines.
self._lines = other.lines + self._lines
# Merge the font_attr_segs.
new_font_attr_segs = {}
for line_index in self.font_attr_segs:
new_font_attr_segs[other_num_lines + line_index] = (
self.font_attr_segs[line_index])
new_font_attr_segs.update(other.font_attr_segs)
self._font_attr_segs = new_font_attr_segs
# Merge the annotations.
new_annotations = {}
for key in self._annotations:
if isinstance(key, int):
new_annotations[other_num_lines + key] = (self.annotations[key])
else:
new_annotations[key] = other.annotations[key]
new_annotations.update(other.annotations)
self._annotations = new_annotations
def append(self, line, font_attr_segs=None):
"""Append a single line of text.
Args:
line: (str) The text to be added to the end.
font_attr_segs: (list of tuples) Font attribute segments of the appended
line.
"""
self._lines.append(line)
if font_attr_segs:
self._font_attr_segs[len(self._lines) - 1] = font_attr_segs
def append_rich_line(self, rich_line):
self.append(rich_line.text, rich_line.font_attr_segs)
def prepend(self, line, font_attr_segs=None):
"""Prepend (i.e., add to the front) a single line of text.
Args:
line: (str) The text to be added to the front.
font_attr_segs: (list of tuples) Font attribute segments of the appended
line.
"""
other = RichTextLines(line)
if font_attr_segs:
other.font_attr_segs[0] = font_attr_segs
self._extend_before(other)
def write_to_file(self, file_path):
"""Write the object itself to file, in a plain format.
The font_attr_segs and annotations are ignored.
Args:
file_path: (str) path of the file to write to.
"""
with gfile.Open(file_path, "w") as f:
for line in self._lines:
f.write(line + "\n")
# TODO(cais): Add a method to allow appending to a line in RichTextLines with
# both text and font_attr_segs.
def regex_find(orig_screen_output, regex, font_attr):
"""Perform regex match in rich text lines.
Produces a new RichTextLines object with font_attr_segs containing highlighted
regex matches.
Example use cases include:
1) search for specific items in a large list of items, and
2) search for specific numerical values in a large tensor.
Args:
orig_screen_output: The original RichTextLines, in which the regex find
is to be performed.
regex: The regex used for matching.
font_attr: Font attribute used for highlighting the found result.
Returns:
A modified copy of orig_screen_output.
Raises:
ValueError: If input str regex is not a valid regular expression.
"""
new_screen_output = RichTextLines(
orig_screen_output.lines,
font_attr_segs=copy.deepcopy(orig_screen_output.font_attr_segs),
annotations=orig_screen_output.annotations)
try:
re_prog = re.compile(regex)
except sre_constants.error:
raise ValueError("Invalid regular expression: \"%s\"" % regex)
regex_match_lines = []
for i in xrange(len(new_screen_output.lines)):
line = new_screen_output.lines[i]
find_it = re_prog.finditer(line)
match_segs = []
for match in find_it:
match_segs.append((match.start(), match.end(), font_attr))
if match_segs:
if i not in new_screen_output.font_attr_segs:
new_screen_output.font_attr_segs[i] = match_segs
else:
new_screen_output.font_attr_segs[i].extend(match_segs)
new_screen_output.font_attr_segs[i] = sorted(
new_screen_output.font_attr_segs[i], key=lambda x: x[0])
regex_match_lines.append(i)
new_screen_output.annotations[REGEX_MATCH_LINES_KEY] = regex_match_lines
return new_screen_output
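# Example (hypothetical values): highlight every integer in a two-line output.
#   out = regex_find(RichTextLines(["a = 42", "b = 7"]), r"\d+", "yellow")
#   out.font_attr_segs                     -> {0: [(4, 6, "yellow")], 1: [(4, 5, "yellow")]}
#   out.annotations[REGEX_MATCH_LINES_KEY] -> [0, 1]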
def wrap_rich_text_lines(inp, cols):
"""Wrap RichTextLines according to maximum number of columns.
Produces a new RichTextLines object with the text lines, font_attr_segs and
annotations properly wrapped. This ought to be used sparingly, as in most
cases, command handlers producing RichTextLines outputs should know the
screen/panel width via the screen_info kwarg and should produce properly
length-limited lines in the output accordingly.
Args:
inp: Input RichTextLines object.
cols: Number of columns, as an int.
Returns:
1) A new instance of RichTextLines, with line lengths limited to cols.
2) A list of new (wrapped) line index. For example, if the original input
consists of three lines and only the second line is wrapped, and it's
wrapped into two lines, this return value will be: [0, 1, 3].
Raises:
ValueError: If inputs have invalid types.
"""
new_line_indices = []
if not isinstance(inp, RichTextLines):
raise ValueError("Invalid type of input screen_output")
if not isinstance(cols, int):
raise ValueError("Invalid type of input cols")
out = RichTextLines([])
row_counter = 0 # Counter for new row index
for i in xrange(len(inp.lines)):
new_line_indices.append(out.num_lines())
line = inp.lines[i]
if i in inp.annotations:
out.annotations[row_counter] = inp.annotations[i]
if len(line) <= cols:
# No wrapping.
out.lines.append(line)
if i in inp.font_attr_segs:
out.font_attr_segs[row_counter] = inp.font_attr_segs[i]
row_counter += 1
else:
# Wrap.
wlines = [] # Wrapped lines.
osegs = []
if i in inp.font_attr_segs:
osegs = inp.font_attr_segs[i]
idx = 0
while idx < len(line):
if idx + cols > len(line):
rlim = len(line)
else:
rlim = idx + cols
wlines.append(line[idx:rlim])
for seg in osegs:
if (seg[0] < rlim) and (seg[1] >= idx):
# Calculate left bound within wrapped line.
if seg[0] >= idx:
lb = seg[0] - idx
else:
lb = 0
# Calculate right bound within wrapped line.
if seg[1] < rlim:
rb = seg[1] - idx
else:
rb = rlim - idx
if rb > lb: # Omit zero-length segments.
wseg = (lb, rb, seg[2])
if row_counter not in out.font_attr_segs:
out.font_attr_segs[row_counter] = [wseg]
else:
out.font_attr_segs[row_counter].append(wseg)
idx += cols
row_counter += 1
out.lines.extend(wlines)
# Copy over keys of annotation that are not row indices.
for key in inp.annotations:
if not isinstance(key, int):
out.annotations[key] = inp.annotations[key]
return out, new_line_indices
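# Example: a single 10-character line wrapped at 4 columns becomes three rows;
# the returned index list maps original line 0 to wrapped row 0.
#   wrapped, idx = wrap_rich_text_lines(RichTextLines(["0123456789"]), 4)
#   wrapped.lines -> ["0123", "4567", "89"]; idx -> [0]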
class CommandHandlerRegistry(object):
"""Registry of command handlers for CLI.
Handler methods (callables) for user commands can be registered with this
class, which then is able to dispatch commands to the correct handlers and
retrieve the RichTextLines output.
For example, suppose you have the following handler defined:
def echo(argv, screen_info=None):
return RichTextLines(["arguments = %s" % " ".join(argv),
"screen_info = " + repr(screen_info)])
you can register the handler with the command prefix "echo" and alias "e":
registry = CommandHandlerRegistry()
registry.register_command_handler("echo", echo,
"Echo arguments, along with screen info", prefix_aliases=["e"])
then to invoke this command handler with some arguments and screen_info, do:
registry.dispatch_command("echo", ["foo", "bar"], screen_info={"cols": 80})
or with the prefix alias:
registry.dispatch_command("e", ["foo", "bar"], screen_info={"cols": 80})
The call will return a RichTextLines object which can be rendered by a CLI.
"""
HELP_COMMAND = "help"
HELP_COMMAND_ALIASES = ["h"]
VERSION_COMMAND = "version"
VERSION_COMMAND_ALIASES = ["ver"]
def __init__(self):
# A dictionary from command prefix to handler.
self._handlers = {}
# A dictionary from prefix alias to prefix.
self._alias_to_prefix = {}
# A dictionary from prefix to aliases.
self._prefix_to_aliases = {}
# A dictionary from command prefix to help string.
self._prefix_to_help = {}
# Introductory text to help information.
self._help_intro = None
# Register a default handler for the command "help".
self.register_command_handler(
self.HELP_COMMAND,
self._help_handler,
"Print this help message.",
prefix_aliases=self.HELP_COMMAND_ALIASES)
# Register a default handler for the command "version".
self.register_command_handler(
self.VERSION_COMMAND,
self._version_handler,
"Print the versions of TensorFlow and its key dependencies.",
prefix_aliases=self.VERSION_COMMAND_ALIASES)
def register_command_handler(self,
prefix,
handler,
help_info,
prefix_aliases=None):
"""Register a callable as a command handler.
Args:
prefix: Command prefix, i.e., the first word in a command, e.g.,
"print" as in "print tensor_1".
handler: A callable of the following signature:
foo_handler(argv, screen_info=None),
where argv is the argument vector (excluding the command prefix) and
screen_info is a dictionary containing information about the screen,
such as number of columns, e.g., {"cols": 100}.
The callable should return:
1) a RichTextLines object representing the screen output.
The callable can also raise an exception of the type CommandLineExit,
which if caught by the command-line interface, will lead to its exit.
The exception can optionally carry an exit token of arbitrary type.
help_info: A help string.
prefix_aliases: Aliases for the command prefix, as a list of str. E.g.,
shorthands for the command prefix: ["p", "pr"]
Raises:
ValueError: If
1) the prefix is empty, or
2) handler is not callable, or
3) a handler is already registered for the prefix, or
4) elements in prefix_aliases clash with existing aliases.
5) help_info is not a str.
"""
if not prefix:
raise ValueError("Empty command prefix")
if prefix in self._handlers:
raise ValueError(
"A handler is already registered for command prefix \"%s\"" % prefix)
# Make sure handler is callable.
if not callable(handler):
raise ValueError("handler is not callable")
# Make sure that help info is a string.
if not isinstance(help_info, six.string_types):
raise ValueError("help_info is not a str")
# Process prefix aliases.
if prefix_aliases:
for alias in prefix_aliases:
if self._resolve_prefix(alias):
raise ValueError(
"The prefix alias \"%s\" clashes with existing prefixes or "
"aliases." % alias)
self._alias_to_prefix[alias] = prefix
self._prefix_to_aliases[prefix] = prefix_aliases
# Store handler.
self._handlers[prefix] = handler
# Store help info.
self._prefix_to_help[prefix] = help_info
def dispatch_command(self, prefix, argv, screen_info=None):
"""Handles a command by dispatching it to a registered command handler.
Args:
prefix: Command prefix, as a str, e.g., "print".
argv: Command argument vector, excluding the command prefix, represented
as a list of str, e.g.,
["tensor_1"]
screen_info: A dictionary containing screen info, e.g., {"cols": 100}.
Returns:
An instance of RichTextLines or None. If any exception is caught during
the invocation of the command handler, the RichTextLines will wrap the
error type and message.
Raises:
ValueError: If
1) prefix is empty, or
2) no command handler is registered for the command prefix, or
3) the handler is found for the prefix, but it fails to return a
RichTextLines or raise any exception.
CommandLineExit:
If the command handler raises this type of exception, this method will
simply pass it along.
"""
if not prefix:
raise ValueError("Prefix is empty")
resolved_prefix = self._resolve_prefix(prefix)
if not resolved_prefix:
raise ValueError("No handler is registered for command prefix \"%s\"" %
prefix)
handler = self._handlers[resolved_prefix]
try:
output = handler(argv, screen_info=screen_info)
except CommandLineExit as e:
raise e
except SystemExit as e:
# Special case for syntax errors caught by argparse.
lines = ["Syntax error for command: %s" % prefix,
"For help, do \"help %s\"" % prefix]
output = RichTextLines(lines)
except BaseException as e: # pylint: disable=broad-except
lines = ["Error occurred during handling of command: %s %s:" %
(resolved_prefix, " ".join(argv)), "%s: %s" % (type(e), str(e))]
# Include traceback of the exception.
lines.append("")
lines.extend(traceback.format_exc().split("\n"))
output = RichTextLines(lines)
if not isinstance(output, RichTextLines) and output is not None:
raise ValueError(
"Return value from command handler %s is not None or a RichTextLines "
"instance" % str(handler))
return output
def is_registered(self, prefix):
"""Test if a command prefix or its alias is has a registered handler.
Args:
prefix: A prefix or its alias, as a str.
Returns:
True iff a handler is registered for prefix.
"""
return self._resolve_prefix(prefix) is not None
def get_help(self, cmd_prefix=None):
"""Compile help information into a RichTextLines object.
Args:
cmd_prefix: Optional command prefix. As the prefix itself or one of its
aliases.
Returns:
A RichTextLines object containing the help information. If cmd_prefix
is None, the return value will be the full command-line help. Otherwise,
it will be the help information for the specified command.
"""
if not cmd_prefix:
# Print full help information, in sorted order of the command prefixes.
help_info = RichTextLines([])
if self._help_intro:
# If help intro is available, show it at the beginning.
help_info.extend(self._help_intro)
sorted_prefixes = sorted(self._handlers)
for cmd_prefix in sorted_prefixes:
lines = self._get_help_for_command_prefix(cmd_prefix)
lines.append("")
lines.append("")
help_info.extend(RichTextLines(lines))
return help_info
else:
return RichTextLines(self._get_help_for_command_prefix(cmd_prefix))
def set_help_intro(self, help_intro):
"""Set an introductory message to help output.
Args:
help_intro: (RichTextLines) Rich text lines appended to the
beginning of the output of the command "help", as introductory
information.
"""
self._help_intro = help_intro
def _help_handler(self, args, screen_info=None):
"""Command handler for "help".
"help" is a common command that merits built-in support from this class.
Args:
args: Command line arguments to "help" (not including "help" itself).
screen_info: (dict) Information regarding the screen, e.g., the screen
width in characters: {"cols": 80}
Returns:
(RichTextLines) Screen text output.
"""
_ = screen_info # Unused currently.
if not args:
return self.get_help()
elif len(args) == 1:
return self.get_help(args[0])
else:
return RichTextLines(["ERROR: help takes only 0 or 1 input argument."])
def _version_handler(self, args, screen_info=None):
del args # Unused currently.
del screen_info # Unused currently.
return get_tensorflow_version_lines(include_dependency_versions=True)
def _resolve_prefix(self, token):
"""Resolve command prefix from the prefix itself or its alias.
Args:
token: a str to be resolved.
Returns:
If resolvable, the resolved command prefix.
If not resolvable, None.
"""
if token in self._handlers:
return token
elif token in self._alias_to_prefix:
return self._alias_to_prefix[token]
else:
return None
def _get_help_for_command_prefix(self, cmd_prefix):
"""Compile the help information for a given command prefix.
Args:
cmd_prefix: Command prefix, as the prefix itself or one of its
aliases.
Returns:
      A list of str as the help information for cmd_prefix. If the cmd_prefix
does not exist, the returned list of str will indicate that.
"""
lines = []
resolved_prefix = self._resolve_prefix(cmd_prefix)
if not resolved_prefix:
lines.append("Invalid command prefix: \"%s\"" % cmd_prefix)
return lines
lines.append(resolved_prefix)
if resolved_prefix in self._prefix_to_aliases:
lines.append(HELP_INDENT + "Aliases: " + ", ".join(
self._prefix_to_aliases[resolved_prefix]))
lines.append("")
help_lines = self._prefix_to_help[resolved_prefix].split("\n")
for line in help_lines:
lines.append(HELP_INDENT + line)
return lines
class TabCompletionRegistry(object):
"""Registry for tab completion responses."""
def __init__(self):
self._comp_dict = {}
# TODO(cais): Rename method names with "comp" to "*completion*" to avoid
# confusion.
def register_tab_comp_context(self, context_words, comp_items):
"""Register a tab-completion context.
Register that, for each word in context_words, the potential tab-completions
are the words in comp_items.
A context word is a pre-existing, completed word in the command line that
determines how tab-completion works for another, incomplete word in the same
command line.
Completion items consist of potential candidates for the incomplete word.
To give a general example, a context word can be "drink", and the completion
items can be ["coffee", "tea", "water"]
Note: A context word can be empty, in which case the context is for the
top-level commands.
Args:
context_words: A list of context words belonging to the context being
registered. It is a list of str, instead of a single string, to support
synonym words triggering the same tab-completion context, e.g.,
both "drink" and the short-hand "dr" can trigger the same context.
comp_items: A list of completion items, as a list of str.
Raises:
TypeError: if the input arguments are not all of the correct types.
"""
if not isinstance(context_words, list):
raise TypeError("Incorrect type in context_list: Expected list, got %s" %
type(context_words))
if not isinstance(comp_items, list):
raise TypeError("Incorrect type in comp_items: Expected list, got %s" %
type(comp_items))
# Sort the completion items on registration, so that later during
# get_completions calls, no sorting will be necessary.
sorted_comp_items = sorted(comp_items)
for context_word in context_words:
self._comp_dict[context_word] = sorted_comp_items
def deregister_context(self, context_words):
"""Deregister a list of context words.
Args:
context_words: A list of context words to deregister, as a list of str.
Raises:
KeyError: if there are word(s) in context_words that do not correspond
to any registered contexts.
"""
for context_word in context_words:
if context_word not in self._comp_dict:
raise KeyError("Cannot deregister unregistered context word \"%s\"" %
context_word)
for context_word in context_words:
del self._comp_dict[context_word]
def extend_comp_items(self, context_word, new_comp_items):
"""Add a list of completion items to a completion context.
Args:
context_word: A single completion word as a string. The extension will
also apply to all other context words of the same context.
new_comp_items: (list of str) New completion items to add.
Raises:
KeyError: if the context word has not been registered.
"""
if context_word not in self._comp_dict:
raise KeyError("Context word \"%s\" has not been registered" %
context_word)
self._comp_dict[context_word].extend(new_comp_items)
self._comp_dict[context_word] = sorted(self._comp_dict[context_word])
def remove_comp_items(self, context_word, comp_items):
"""Remove a list of completion items from a completion context.
Args:
context_word: A single completion word as a string. The removal will
also apply to all other context words of the same context.
comp_items: Completion items to remove.
Raises:
KeyError: if the context word has not been registered.
"""
if context_word not in self._comp_dict:
raise KeyError("Context word \"%s\" has not been registered" %
context_word)
for item in comp_items:
self._comp_dict[context_word].remove(item)
def get_completions(self, context_word, prefix):
"""Get the tab completions given a context word and a prefix.
Args:
context_word: The context word.
prefix: The prefix of the incomplete word.
Returns:
(1) None if no registered context matches the context_word.
        A list of str for the matching completion items. Can be an empty list
        if a matching context exists, but no completion item matches the
        prefix.
(2) Common prefix of all the words in the first return value. If the
first return value is None, this return value will be None, too. If
the first return value is not None, i.e., a list, this return value
will be a str, which can be an empty str if there is no common
prefix among the items of the list.
"""
if context_word not in self._comp_dict:
return None, None
comp_items = self._comp_dict[context_word]
comp_items = sorted(
[item for item in comp_items if item.startswith(prefix)])
return comp_items, self._common_prefix(comp_items)
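  # Example (hypothetical registry contents): with the context word "drink"
  # mapped to ["coffee", "cola", "tea"], completing the prefix "co" returns
  # the sorted matches plus their longest common prefix:
  #   registry.get_completions("drink", "co") -> (["coffee", "cola"], "co")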
def _common_prefix(self, m):
"""Given a list of str, returns the longest common prefix.
Args:
m: (list of str) A list of strings.
Returns:
(str) The longest common prefix.
"""
if not m:
return ""
s1 = min(m)
s2 = max(m)
for i, c in enumerate(s1):
if c != s2[i]:
return s1[:i]
return s1
class CommandHistory(object):
"""Keeps command history and supports lookup."""
_HISTORY_FILE_NAME = ".tfdbg_history"
def __init__(self, limit=100, history_file_path=None):
"""CommandHistory constructor.
Args:
limit: Maximum number of the most recent commands that this instance
keeps track of, as an int.
history_file_path: (str) Manually specified path to history file. Used in
testing.
"""
self._commands = []
self._limit = limit
self._history_file_path = (
history_file_path or self._get_default_history_file_path())
self._load_history_from_file()
def _load_history_from_file(self):
if os.path.isfile(self._history_file_path):
try:
with open(self._history_file_path, "rt") as history_file:
commands = history_file.readlines()
self._commands = [command.strip() for command in commands
if command.strip()]
# Limit the size of the history file.
if len(self._commands) > self._limit:
self._commands = self._commands[-self._limit:]
with open(self._history_file_path, "wt") as history_file:
for command in self._commands:
history_file.write(command + "\n")
except IOError:
print("WARNING: writing history file failed.")
def _add_command_to_history_file(self, command):
try:
with open(self._history_file_path, "at") as history_file:
history_file.write(command + "\n")
except IOError:
pass
@classmethod
def _get_default_history_file_path(cls):
return os.path.join(os.path.expanduser("~"), cls._HISTORY_FILE_NAME)
def add_command(self, command):
"""Add a command to the command history.
Args:
command: The history command, as a str.
Raises:
TypeError: if command is not a str.
"""
if self._commands and command == self._commands[-1]:
# Ignore repeating commands in a row.
return
if not isinstance(command, six.string_types):
raise TypeError("Attempt to enter non-str entry to command history")
self._commands.append(command)
if len(self._commands) > self._limit:
self._commands = self._commands[-self._limit:]
self._add_command_to_history_file(command)
def most_recent_n(self, n):
"""Look up the n most recent commands.
Args:
n: Number of most recent commands to look up.
Returns:
A list of n most recent commands, or all available most recent commands,
if n exceeds size of the command history, in chronological order.
"""
return self._commands[-n:]
def lookup_prefix(self, prefix, n):
"""Look up the n most recent commands that starts with prefix.
Args:
prefix: The prefix to lookup.
n: Number of most recent commands to look up.
Returns:
A list of n most recent commands that have the specified prefix, or all
available most recent commands that have the prefix, if n exceeds the
number of history commands with the prefix.
"""
commands = [cmd for cmd in self._commands if cmd.startswith(prefix)]
return commands[-n:]
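  # Example (hypothetical history): with commands ["run", "run -t", "print x"],
  #   history.lookup_prefix("run", 1) -> ["run -t"]
  #   history.most_recent_n(2)        -> ["run -t", "print x"]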
# TODO(cais): Lookup by regex.
class MenuItem(object):
"""A class for an item in a text-based menu."""
def __init__(self, caption, content, enabled=True):
"""Menu constructor.
TODO(cais): Nested menu is currently not supported. Support it.
Args:
caption: (str) caption of the menu item.
content: Content of the menu item. For a menu item that triggers
a command, for example, content is the command string.
enabled: (bool) whether this menu item is enabled.
"""
self._caption = caption
self._content = content
self._enabled = enabled
@property
def caption(self):
return self._caption
  @property
  def type(self):
    # NOTE: self._node_type is never assigned in this class, so accessing
    # this property raises AttributeError; it appears to be dead code.
    return self._node_type
@property
def content(self):
return self._content
def is_enabled(self):
return self._enabled
def disable(self):
self._enabled = False
def enable(self):
self._enabled = True
class Menu(object):
"""A class for text-based menu."""
def __init__(self, name=None):
"""Menu constructor.
Args:
name: (str or None) name of this menu.
"""
self._name = name
self._items = []
def append(self, item):
"""Append an item to the Menu.
Args:
item: (MenuItem) the item to be appended.
"""
self._items.append(item)
def insert(self, index, item):
self._items.insert(index, item)
def num_items(self):
return len(self._items)
def captions(self):
return [item.caption for item in self._items]
def caption_to_item(self, caption):
"""Get a MenuItem from the caption.
Args:
caption: (str) The caption to look up.
Returns:
(MenuItem) The first-match menu item with the caption, if any.
Raises:
LookupError: If a menu item with the caption does not exist.
"""
captions = self.captions()
if caption not in captions:
raise LookupError("There is no menu item with the caption \"%s\"" %
caption)
return self._items[captions.index(caption)]
def format_as_single_line(self,
prefix=None,
divider=" | ",
enabled_item_attrs=None,
disabled_item_attrs=None):
"""Format the menu as a single-line RichTextLines object.
Args:
prefix: (str) String added to the beginning of the line.
divider: (str) The dividing string between the menu items.
enabled_item_attrs: (list or str) Attributes applied to each enabled
menu item, e.g., ["bold", "underline"].
disabled_item_attrs: (list or str) Attributes applied to each
disabled menu item, e.g., ["red"].
Returns:
(RichTextLines) A single-line output representing the menu, with
font_attr_segs marking the individual menu items.
"""
if (enabled_item_attrs is not None and
not isinstance(enabled_item_attrs, list)):
enabled_item_attrs = [enabled_item_attrs]
if (disabled_item_attrs is not None and
not isinstance(disabled_item_attrs, list)):
disabled_item_attrs = [disabled_item_attrs]
menu_line = prefix if prefix is not None else ""
attr_segs = []
for item in self._items:
menu_line += item.caption
item_name_begin = len(menu_line) - len(item.caption)
if item.is_enabled():
final_attrs = [item]
if enabled_item_attrs:
final_attrs.extend(enabled_item_attrs)
attr_segs.append((item_name_begin, len(menu_line), final_attrs))
else:
if disabled_item_attrs:
attr_segs.append(
(item_name_begin, len(menu_line), disabled_item_attrs))
menu_line += divider
return RichTextLines(menu_line, font_attr_segs={0: attr_segs})
| 33.021548
| 81
| 0.650498
|
7188beb368060d7629cf3c8369df8c47070c62e1
| 553
|
py
|
Python
|
recommmender_system_project/tools.py
|
noufanpmc/Movie-Recommendation-System
|
2d446a51579eef9a422ca5b7a4a937d1584f42dc
|
[
"MIT"
] | null | null | null |
recommmender_system_project/tools.py
|
noufanpmc/Movie-Recommendation-System
|
2d446a51579eef9a422ca5b7a4a937d1584f42dc
|
[
"MIT"
] | null | null | null |
recommmender_system_project/tools.py
|
noufanpmc/Movie-Recommendation-System
|
2d446a51579eef9a422ca5b7a4a937d1584f42dc
|
[
"MIT"
] | null | null | null |
import pandas
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
def visualize_embeddings(embeddings: pandas.DataFrame) -> None:
"""Visualize the embeddings of the items in 2d.
Parameters
----------
embeddings : pandas.DataFrame
A dataframe with item embeddings.
Returns
-------
None
"""
    tsne = TSNE(n_components=2, verbose=1, perplexity=30, n_iter=1000, learning_rate=10)
tsne_results = tsne.fit_transform(embeddings)
plt.scatter(tsne_results[:,0], tsne_results[:,1])
plt.show()
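# Usage sketch (hypothetical data): project 100 random 16-dimensional item
# embeddings down to 2-D and plot them.
#   import numpy
#   emb = pandas.DataFrame(numpy.random.rand(100, 16))
#   visualize_embeddings(emb)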
| 26.333333
| 87
| 0.683544
|
513ca17d6cbb30b4b7615c40c5609dc0ff7f3609
| 529
|
py
|
Python
|
solutions/3b/electronics/scripts/q3/publisher.py
|
adbidwai/QSTP-Kratos_electronics
|
4f7e4176a129847130d727e3c2fb7b528080a5ae
|
[
"MIT"
] | 8
|
2020-05-11T05:48:14.000Z
|
2021-09-10T06:24:52.000Z
|
solutions/3b/electronics/scripts/q3/publisher.py
|
adbidwai/QSTP-Kratos_electronics
|
4f7e4176a129847130d727e3c2fb7b528080a5ae
|
[
"MIT"
] | null | null | null |
solutions/3b/electronics/scripts/q3/publisher.py
|
adbidwai/QSTP-Kratos_electronics
|
4f7e4176a129847130d727e3c2fb7b528080a5ae
|
[
"MIT"
] | 2
|
2020-05-29T06:08:32.000Z
|
2020-08-03T13:24:09.000Z
|
#! /usr/bin/env python
import rospy
from electronics.msg import Rover #import the custom message file you created
rospy.init_node("ROVER") #intialize a node
pub = rospy.Publisher('rover',Rover,queue_size=10) # create a publisher object
msg = Rover() #create a message object of class Rover
rate = rospy.Rate(10) #initialize rate object
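# rospy.Rate(10) targets a 10 Hz loop: each rate.sleep() below blocks for
# whatever remains of the current 0.1 s cycle.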
while not rospy.is_shutdown():
pub.publish(msg) #call the publish() method of pub object to publish messages on topic
rate.sleep()
| 37.785714
| 94
| 0.68242
|
0bb1da02035da6e8625a1114e7c8f69705443a4b
| 6,684
|
py
|
Python
|
deepclaw/driver/arms/Inverse_Kinematics/ikpy/chain.py
|
Nokkxz/ME336-Yellow-Team-Project
|
5b6d65ecb134049ba3c5d27c37f521ada79a913f
|
[
"MIT"
] | 5
|
2020-06-24T03:47:00.000Z
|
2021-10-13T03:35:38.000Z
|
deepclaw/driver/arms/Inverse_Kinematics/ikpy/chain.py
|
Nokkxz/ME336-Yellow-Team-Project
|
5b6d65ecb134049ba3c5d27c37f521ada79a913f
|
[
"MIT"
] | null | null | null |
deepclaw/driver/arms/Inverse_Kinematics/ikpy/chain.py
|
Nokkxz/ME336-Yellow-Team-Project
|
5b6d65ecb134049ba3c5d27c37f521ada79a913f
|
[
"MIT"
] | 3
|
2020-06-18T09:25:39.000Z
|
2021-04-18T03:51:08.000Z
|
# coding= utf8
"""
.. module:: chain
This module implements the Chain class.
"""
import numpy as np
# IKPY imports
from . import URDF_utils
from . import inverse_kinematics as ik
from . import link as link_lib
class Chain(object):
"""The base Chain class
Parameters
----------
links: list[ikpy.link.Link]
List of the links of the chain
active_links_mask: list
A list of boolean indicating that whether or not the corresponding link is active
name: str
The name of the Chain
"""
    def __init__(self, links, active_links_mask=None, name="chain", profile="", **kwargs):
self.name = name
self.links = links
self._length = sum([link.length for link in links])
# Avoid length of zero in a link
for (index, link) in enumerate(self.links):
if link.length == 0:
link.axis_length = self.links[index - 1].axis_length
# If the active_links_mask is not given, set it to True for every link
if active_links_mask is not None:
if len(active_links_mask) != len(self.links):
raise ValueError("Your active links mask length of {} is different from the number of your links, which is {}".format(len(active_links_mask), len(self.links)))
self.active_links_mask = np.array(active_links_mask)
            # Always set the last link to inactive (False)
            self.active_links_mask[-1] = False
else:
self.active_links_mask = np.array([True] * len(links))
def __repr__(self):
return "Kinematic chain name={} links={} active_links={}".format(self.name, self.links, self.active_links_mask)
def forward_kinematics(self, joints, full_kinematics=False):
"""Returns the transformation matrix of the forward kinematics
Parameters
----------
joints: list
The list of the positions of each joint. Note : Inactive joints must be in the list.
full_kinematics: bool
Return the transformation matrices of each joint
Returns
-------
frame_matrix:
The transformation matrix
"""
frame_matrix = np.eye(4)
if full_kinematics:
frame_matrixes = []
if len(self.links) != len(joints):
raise ValueError("Your joints vector length is {} but you have {} links".format(len(joints), len(self.links)))
for index, (link, joint_angle) in enumerate(zip(self.links, joints)):
# Compute iteratively the position
# NB : Use asarray to avoid old sympy problems
frame_matrix = np.dot(frame_matrix, np.asarray(link.get_transformation_matrix(joint_angle)))
if full_kinematics:
# rotation_axe = np.dot(frame_matrix, link.rotation)
frame_matrixes.append(frame_matrix)
# Return the matrix, or matrixes
if full_kinematics:
return frame_matrixes
else:
return frame_matrix
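    # Usage sketch (assumes a chain loaded via Chain.from_urdf_file; the file
    # name is a placeholder): the end-effector position is the translation
    # column of the returned 4x4 homogeneous frame.
    #   chain = Chain.from_urdf_file("my_robot.urdf")
    #   frame = chain.forward_kinematics([0.0] * len(chain.links))
    #   end_effector_xyz = frame[:3, 3]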
def inverse_kinematics(self, target, initial_position=None, **kwargs):
"""Computes the inverse kinematic on the specified target
Parameters
----------
target: numpy.array
The frame target of the inverse kinematic, in meters. It must be 4x4 transformation matrix
initial_position: numpy.array
Optional : the initial position of each joint of the chain. Defaults to 0 for each joint
Returns
-------
The list of the positions of each joint according to the target. Note : Inactive joints are in the list.
"""
# Checks on input
target = np.array(target)
if target.shape != (4, 4):
raise ValueError("Your target must be a 4x4 transformation matrix")
if initial_position is None:
initial_position = [0] * len(self.links)
return ik.inverse_kinematic_optimization(self, target, starting_nodes_angles=initial_position, **kwargs)
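    # Usage sketch (hypothetical target): solve for a pose 0.1 m above the
    # base with identity orientation.
    #   target = np.eye(4)
    #   target[:3, 3] = [0.0, 0.0, 0.1]
    #   joints = chain.inverse_kinematics(target)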
def plot(self, joints, ax, target=None, show=False):
"""Plots the Chain using Matplotlib
Parameters
----------
joints: list
The list of the positions of each joint
ax: matplotlib.axes.Axes
A matplotlib axes
target: numpy.array
An optional target
show: bool
Display the axe. Defaults to False
"""
from . import plot_utils
if ax is None:
# If ax is not given, create one
ax = plot_utils.init_3d_figure()
plot_utils.plot_chain(self, joints, ax)
plot_utils.plot_basis(ax, self._length)
# Plot the goal position
if target is not None:
plot_utils.plot_target(target, ax)
if show:
plot_utils.show_figure()
@classmethod
def from_urdf_file(cls, urdf_file, base_elements=None, last_link_vector=None, base_element_type="link", active_links_mask=None, name="chain"):
"""Creates a chain from an URDF file
Parameters
----------
urdf_file: str
The path of the URDF file
base_elements: list of strings
List of the links beginning the chain
last_link_vector: numpy.array
Optional : The translation vector of the tip.
name: str
The name of the Chain
base_element_type: str
active_links_mask: list[bool]
Note
----
        IKPY works with links, whereas URDF works with joints and links. The mapping is currently misleading:
* URDF joints = IKPY links
* URDF links are not used by IKPY. They are thrown away when parsing
"""
# FIXME: Rename links to joints, to be coherent with URDF?
if base_elements is None:
base_elements = ["base_link"]
links = URDF_utils.get_urdf_parameters(urdf_file, base_elements=base_elements, last_link_vector=last_link_vector, base_element_type=base_element_type)
# Add an origin link at the beginning
return cls([link_lib.OriginLink()] + links, active_links_mask=active_links_mask, name=name)
def active_to_full(self, active_joints, initial_position):
        full_joints = np.array(initial_position, copy=True, dtype=float)  # np.float was removed in modern numpy
np.place(full_joints, self.active_links_mask, active_joints)
return full_joints
def active_from_full(self, joints):
return np.compress(self.active_links_mask, joints, axis=0)
@classmethod
def concat(cls, chain1, chain2):
return cls(links=chain1.links + chain2.links, active_links_mask=chain1.active_links_mask + chain2.active_links_mask)
| 36.928177
| 175
| 0.633303
|
7e7040947b5ab26081581e2058cc44a5928bfcb8
| 506
|
py
|
Python
|
00_Program/OddEven.py
|
zharmedia386/Python-Programming-Exercises
|
5d27be4e72cc153ef6c677062a4783abfaf4390c
|
[
"MIT"
] | null | null | null |
00_Program/OddEven.py
|
zharmedia386/Python-Programming-Exercises
|
5d27be4e72cc153ef6c677062a4783abfaf4390c
|
[
"MIT"
] | null | null | null |
00_Program/OddEven.py
|
zharmedia386/Python-Programming-Exercises
|
5d27be4e72cc153ef6c677062a4783abfaf4390c
|
[
"MIT"
] | null | null | null |
# This program checks whether the input number is odd or even.
# If the input number mod 2 == 0, it is an even number.
# If the input number mod 2 != 0, it is an odd number.
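# For example: 10 % 2 == 0, so 10 is even; 7 % 2 == 1, so 7 is odd.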
def input_number() :
number = int(input("Enter your number : "))
return number
def checking_number() :
number = input_number()
if(number % 2 == 0) : print("%d is an even number" % number)
elif(number % 2 != 0) : print("%d is an odd number" % number)
if __name__ == "__main__" :
checking_number()
| 33.733333
| 74
| 0.652174
|
6f330ba86e3da571ae32393d9dff42110bd0c1f4
| 3,593
|
py
|
Python
|
c2cwsgiutils/request_tracking/__init__.py
|
camptocamp/c2cwsgiutils
|
7f99937ccebc31c8e97c58de3819458eb9d6435e
|
[
"BSD-2-Clause-FreeBSD"
] | 5
|
2017-06-25T07:50:57.000Z
|
2019-01-25T16:49:57.000Z
|
c2cwsgiutils/request_tracking/__init__.py
|
camptocamp/c2cwsgiutils
|
7f99937ccebc31c8e97c58de3819458eb9d6435e
|
[
"BSD-2-Clause-FreeBSD"
] | 200
|
2017-02-01T15:13:38.000Z
|
2022-03-31T21:29:54.000Z
|
c2cwsgiutils/request_tracking/__init__.py
|
camptocamp/c2cwsgiutils
|
7f99937ccebc31c8e97c58de3819458eb9d6435e
|
[
"BSD-2-Clause-FreeBSD"
] | 4
|
2019-07-08T12:28:19.000Z
|
2020-06-10T02:44:35.000Z
|
"""
Allows tracking of the request_id in the logs, the DB and elsewhere.
Adds a c2c_request_id attribute to the Pyramid Request class to access it.
"""
import logging
import urllib.parse
import uuid
from typing import Any, Dict, List, Optional, Sequence # noqa # pylint: disable=unused-import
import pyramid.request
import requests.adapters
import requests.models
from pyramid.threadlocal import get_current_request
from c2cwsgiutils import config_utils, stats
ID_HEADERS: List[str] = []
_HTTPAdapter_send = requests.adapters.HTTPAdapter.send
LOG = logging.getLogger(__name__)
DEFAULT_TIMEOUT: Optional[float] = None
def _gen_request_id(request: pyramid.request.Request) -> str:
for id_header in ID_HEADERS:
if id_header in request.headers:
return request.headers[id_header] # type: ignore
return str(uuid.uuid4())
def _patch_requests() -> None:
def send_wrapper(
self: requests.adapters.HTTPAdapter,
request: requests.models.PreparedRequest,
timeout: Optional[float] = None,
**kwargs: Any,
) -> requests.Response:
pyramid_request = get_current_request()
header = ID_HEADERS[0]
if pyramid_request is not None and header not in request.headers:
request.headers[header] = pyramid_request.c2c_request_id
if timeout is None:
if DEFAULT_TIMEOUT is not None:
timeout = DEFAULT_TIMEOUT
else:
LOG.warning("Doing a %s request without timeout to %s", request.method, request.url)
status = 999
timer = stats.timer()
try:
response = _HTTPAdapter_send(self, request, timeout=timeout, **kwargs)
status = response.status_code
return response
finally:
if request.url is not None:
parsed = urllib.parse.urlparse(request.url)
port = parsed.port or (80 if parsed.scheme == "http" else 443)
if stats.USE_TAGS:
key: Sequence[Any] = ["requests"]
tags: Optional[Dict[str, Any]] = dict(
scheme=parsed.scheme,
host=parsed.hostname,
port=port,
method=request.method,
status=status,
)
else:
key = ["requests", parsed.scheme, parsed.hostname, port, request.method, status]
tags = None
timer.stop(key, tags)
requests.adapters.HTTPAdapter.send = send_wrapper # type: ignore
def init(config: Optional[pyramid.config.Configurator] = None) -> None:
"""
Initialize the request tracking.
    Use an X-Request-ID (or similar) header to track all the logs related to a request
including on the sub services.
"""
global ID_HEADERS, DEFAULT_TIMEOUT
ID_HEADERS = ["X-Request-ID", "X-Correlation-ID", "Request-ID", "X-Varnish", "X-Amzn-Trace-Id"]
if config is not None:
extra_header = config_utils.env_or_config(config, "C2C_REQUEST_ID_HEADER", "c2c.request_id_header")
if extra_header:
ID_HEADERS.insert(0, extra_header)
config.add_request_method(_gen_request_id, "c2c_request_id", reify=True)
DEFAULT_TIMEOUT = config_utils.env_or_config(
config, "C2C_REQUESTS_DEFAULT_TIMEOUT", "c2c.requests_default_timeout", type_=float
)
_patch_requests()
if config_utils.env_or_config(config, "C2C_SQL_REQUEST_ID", "c2c.sql_request_id", False):
from . import _sql
_sql.init()
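# Usage sketch (assumes a Pyramid Configurator named `config`):
#   from c2cwsgiutils import request_tracking
#   request_tracking.init(config)
#   # Every request then exposes request.c2c_request_id, taken from the first
#   # matching incoming header or generated as a fresh UUID4; outgoing
#   # `requests` calls propagate it automatically via the patched send().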
| 35.93
| 107
| 0.636237
|
217783d02076d1dbb606307df41599f59887e185
| 2,239
|
py
|
Python
|
bpy_lattice/camera.py
|
ChristopherMayes/bpy-lattice
|
66f7d292795b137b24128153adba37035788ef93
|
[
"Apache-2.0"
] | null | null | null |
bpy_lattice/camera.py
|
ChristopherMayes/bpy-lattice
|
66f7d292795b137b24128153adba37035788ef93
|
[
"Apache-2.0"
] | null | null | null |
bpy_lattice/camera.py
|
ChristopherMayes/bpy-lattice
|
66f7d292795b137b24128153adba37035788ef93
|
[
"Apache-2.0"
] | 1
|
2020-07-17T01:53:21.000Z
|
2020-07-17T01:53:21.000Z
|
import bpy
from math import sin, cos, pi, sqrt
def camera_at(d):
cam = bpy.data.objects['Camera'] # bpy.types.Camera
cam.location.x = 0.0
cam.location.y = -d/sqrt(2)
cam.location.z = d/sqrt(2)
cam.rotation_euler.x = pi/4
cam.rotation_euler.y = 0
cam.rotation_euler.z = 0
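# Note: camera_at(d) places the camera at distance d from the origin on the
# y<0 / z>0 diagonal, pitched 45 degrees about x so it looks back at the origin.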
def ortho_camera_at(z, scale):
cam = bpy.data.objects['Camera'] # bpy.types.Camera
cam.data.type = 'ORTHO'
cam.location.x = 0.0
cam.location.y = 0
cam.location.z = z
cam.rotation_euler.x = 0
cam.rotation_euler.y = 0
cam.rotation_euler.z = 0
cam.data.ortho_scale = scale
def lamp_energy(energy):
lamp = bpy.data.objects['Lamp']
lamp.location.z = 10
lamp.data.energy = energy
def lighting(x, y, z):
    # NOTE: `materials` is not imported in this module; this helper assumes a
    # sibling materials module that provides LIGHT_MATERIAL.
    bpy.ops.mesh.primitive_plane_add(location=(x, y, z))
    bpy.context.active_object.data.materials.append(materials.LIGHT_MATERIAL)
def sun(strength):
#bpy.data.node_groups["Shader Nodetree"].nodes["Emission"].inputs[1].default_value = 0.8
bpy.ops.object.lamp_add(type='SUN', view_align=False, location=(0, 0, 10) )
bpy.context.active_object.data.node_tree.nodes["Emission"].inputs[1].default_value = strength
# Floor
def make_floor():
    # NOTE: `diffuse_material` is not defined or imported in this module; it is
    # assumed to come from a sibling materials helper.
    bpy.ops.mesh.primitive_plane_add(location=(0, 0, 0))
    bpy.context.object.scale[0] = 20
    bpy.context.object.scale[1] = 10
    mat = diffuse_material('floor_material', color=(.2, .2, .2, 1))
    bpy.context.active_object.data.materials.append(mat)
#make_floor()
def L0E_camera():
cam = bpy.data.objects['Camera'] # bpy.types.Camera
cam.location.x = 16.4
cam.location.y = -12.1
cam.location.z = 49
cam.rotation_euler.x = 22*pi/180
cam.rotation_euler.y = 0
cam.rotation_euler.z = 0
def L0E_camera15mm():
cam = bpy.data.objects['Camera'] # bpy.types.Camera
cam.location.x = -14.42104
cam.location.y = -8.99317
cam.location.z = 7.20855
cam.rotation_euler.x = 68.385*pi/180
cam.rotation_euler.y = 0
cam.rotation_euler.z = -47.829*pi/180
| 26.341176
| 97
| 0.687807
|
e0d9d0474d53e26bb0916c55c2f522666894f511
| 1,554
|
py
|
Python
|
NeuralNet.py
|
NoahRJohnson/AlphaReZero
|
e21084ef3896dec47b22de96656144220c046402
|
[
"MIT"
] | null | null | null |
NeuralNet.py
|
NoahRJohnson/AlphaReZero
|
e21084ef3896dec47b22de96656144220c046402
|
[
"MIT"
] | null | null | null |
NeuralNet.py
|
NoahRJohnson/AlphaReZero
|
e21084ef3896dec47b22de96656144220c046402
|
[
"MIT"
] | null | null | null |
class NeuralNet():
"""
This class specifies the base NeuralNet class. To define your own neural
network, subclass this class and implement the functions below. The neural
network does not consider the current player, and instead only deals with
the canonical form of the board.
See miniShogi/NNet.py for an example implementation.
"""
def __init__(self, game):
pass
def train(self, examples):
"""
This function trains the neural network with examples obtained from
self-play.
Input:
examples: a list of training examples, where each example is of form
(board, pi, v). pi is the MCTS informed policy vector for
                  the given board, and v is its value. The examples have the
                  board in its canonical form.
"""
pass
def predict(self, board):
"""
Input:
board: current board in its canonical form.
Returns:
            pi: a policy vector for the current board, a numpy array of length
                game.getActionSize()
v: a float in [-1,1] that gives the value of the current board
"""
pass
def save_checkpoint(self, folder, filename):
"""
Saves the current neural network (with its parameters) in
folder/filename
"""
pass
def load_checkpoint(self, folder, filename):
"""
Loads parameters of the neural network from folder/filename
"""
pass
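# Minimal subclass sketch (illustrative only; a real implementation would wrap
# a framework model, as in miniShogi/NNet.py):
#   class UniformNet(NeuralNet):
#       def __init__(self, game):
#           self.n = game.getActionSize()
#       def predict(self, board):
#           return [1.0 / self.n] * self.n, 0.0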
| 30.470588
| 80
| 0.593308
|
c898177f976c370b0a0dea1ba4302a5225e1f050
| 4,129
|
py
|
Python
|
models/data/transforms/transform_util.py
|
giorking/CenterNet_Pro_Max
|
dc50e7dd4b10eff5ad8f428641cc2f9a7ba01ce6
|
[
"Apache-2.0"
] | 2
|
2020-05-05T07:08:05.000Z
|
2020-06-19T03:20:21.000Z
|
models/data/transforms/transform_util.py
|
cavalleria/CenterNet_Pro_Max
|
bf2f6708c83ca2cb716315bb1aaf71039f458746
|
[
"Apache-2.0"
] | null | null | null |
models/data/transforms/transform_util.py
|
cavalleria/CenterNet_Pro_Max
|
bf2f6708c83ca2cb716315bb1aaf71039f458746
|
[
"Apache-2.0"
] | 2
|
2020-04-08T14:35:47.000Z
|
2020-04-13T06:24:05.000Z
|
#!/usr/bin/env python3
#
# Copyright (c) 2020 jintian.
#
# This file is part of CenterNet_Pro_Max
# (see jinfagang.github.io).
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import numpy as np
import torch
# pyre-ignore-all-errors
def to_float_tensor(numpy_array: np.ndarray) -> torch.Tensor:
"""
Convert the numpy array to torch float tensor with dimension of NxCxHxW.
    PyTorch does not fully support uint8, so the tensor is converted to float
    when numpy_array is uint8.
Args:
numpy_array (ndarray): of shape NxHxWxC, or HxWxC or HxW to
represent an image. The array can be of type uint8 in range
[0, 255], or floating point in range [0, 1] or [0, 255].
Returns:
float_tensor (tensor): converted float tensor.
"""
assert isinstance(numpy_array, np.ndarray)
assert len(numpy_array.shape) in (2, 3, 4)
    # Some input numpy arrays have negative strides. PyTorch currently
    # does not support negative strides, so call ascontiguousarray to
    # resolve the issue.
float_tensor = torch.from_numpy(np.ascontiguousarray(numpy_array))
if numpy_array.dtype == np.uint8:
float_tensor = float_tensor.float()
if len(numpy_array.shape) == 2:
# HxW -> 1x1xHxW.
float_tensor = float_tensor[None, None, :, :]
elif len(numpy_array.shape) == 3:
# HxWxC -> 1xCxHxW.
float_tensor = float_tensor.permute(2, 0, 1)
float_tensor = float_tensor[None, :, :, :]
elif len(numpy_array.shape) == 4:
# NxHxWxC -> NxCxHxW
float_tensor = float_tensor.permute(0, 3, 1, 2)
else:
raise NotImplementedError(
"Unknow numpy_array dimension of {}".format(float_tensor.shape)
)
return float_tensor
def to_numpy(
float_tensor: torch.Tensor, target_shape: list, target_dtype: np.dtype
) -> np.ndarray:
"""
Convert float tensor with dimension of NxCxHxW back to numpy array.
Args:
float_tensor (tensor): a float pytorch tensor with shape of NxCxHxW.
target_shape (list): the target shape of the numpy array to represent
the image as output. options include NxHxWxC, or HxWxC or HxW.
target_dtype (dtype): the target dtype of the numpy array to represent
the image as output. The array can be of type uint8 in range
[0, 255], or floating point in range [0, 1] or [0, 255].
Returns:
(ndarray): converted numpy array.
"""
assert len(target_shape) in (2, 3, 4)
if len(target_shape) == 2:
# 1x1xHxW -> HxW.
assert float_tensor.shape[0] == 1
assert float_tensor.shape[1] == 1
float_tensor = float_tensor[0, 0, :, :]
elif len(target_shape) == 3:
assert float_tensor.shape[0] == 1
# 1xCxHxW -> HxWxC.
float_tensor = float_tensor[0].permute(1, 2, 0)
elif len(target_shape) == 4:
# NxCxHxW -> NxHxWxC
float_tensor = float_tensor.permute(0, 2, 3, 1)
else:
raise NotImplementedError(
"Unknow target shape dimension of {}".format(target_shape)
)
if target_dtype == np.uint8:
        # Need to specifically call round here; note that in PyTorch the
        # rounding mode is half-to-even.
# https://github.com/pytorch/pytorch/issues/16498
float_tensor = float_tensor.round().byte()
return float_tensor.numpy()
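# Round-trip sketch (hypothetical data; shapes and dtypes chosen purely for
# illustration):
#     img = np.random.randint(0, 256, size=(32, 48, 3), dtype=np.uint8)  # HxWxC
#     t = to_float_tensor(img)                        # torch.Size([1, 3, 32, 48])
#     back = to_numpy(t, list(img.shape), img.dtype)  # (32, 48, 3), uint8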
| 37.880734
| 78
| 0.667716
|
a6f9ac5eb51421c1e1b86011f8d39435ad78cc67
| 15,670
|
py
|
Python
|
numpyro/distributions/constraints.py
|
tcbegley/numpyro
|
e55f0d41c9eba48a10e88fb403a5e016f18857e6
|
[
"Apache-2.0"
] | null | null | null |
numpyro/distributions/constraints.py
|
tcbegley/numpyro
|
e55f0d41c9eba48a10e88fb403a5e016f18857e6
|
[
"Apache-2.0"
] | null | null | null |
numpyro/distributions/constraints.py
|
tcbegley/numpyro
|
e55f0d41c9eba48a10e88fb403a5e016f18857e6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
# The implementation follows the design in PyTorch: torch.distributions.constraints.py
#
# Copyright (c) 2016- Facebook, Inc (Adam Paszke)
# Copyright (c) 2014- Facebook, Inc (Soumith Chintala)
# Copyright (c) 2011-2014 Idiap Research Institute (Ronan Collobert)
# Copyright (c) 2012-2014 Deepmind Technologies (Koray Kavukcuoglu)
# Copyright (c) 2011-2012 NEC Laboratories America (Koray Kavukcuoglu)
# Copyright (c) 2011-2013 NYU (Clement Farabet)
# Copyright (c) 2006-2010 NEC Laboratories America (Ronan Collobert, Leon Bottou, Iain Melvin, Jason Weston)
# Copyright (c) 2006 Idiap Research Institute (Samy Bengio)
# Copyright (c) 2001-2004 Idiap Research Institute (Ronan Collobert, Samy Bengio, Johnny Mariethoz)
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
__all__ = [
"boolean",
"circular",
"corr_cholesky",
"corr_matrix",
"dependent",
"greater_than",
"integer_interval",
"integer_greater_than",
"interval",
"is_dependent",
"less_than",
"lower_cholesky",
"multinomial",
"nonnegative_integer",
"positive",
"positive_definite",
"positive_integer",
"real",
"real_vector",
"simplex",
"sphere",
"softplus_lower_cholesky",
"softplus_positive",
"unit_interval",
"Constraint",
]
import math
import numpy as np
import jax.numpy
class Constraint(object):
"""
Abstract base class for constraints.
A constraint object represents a region over which a variable is valid,
e.g. within which a variable can be optimized.
"""
is_discrete = False
event_dim = 0
def __call__(self, x):
raise NotImplementedError
def check(self, value):
"""
Returns a byte tensor of `sample_shape + batch_shape` indicating
whether each event in value satisfies this constraint.
"""
return self(value)
def feasible_like(self, prototype):
"""
        Get a feasible value which has the same shape and dtype as `prototype`.
"""
raise NotImplementedError
class _Boolean(Constraint):
is_discrete = True
def __call__(self, x):
return (x == 0) | (x == 1)
def feasible_like(self, prototype):
return jax.numpy.zeros_like(prototype)
class _CorrCholesky(Constraint):
event_dim = 2
def __call__(self, x):
jnp = np if isinstance(x, (np.ndarray, np.generic)) else jax.numpy
tril = jnp.tril(x)
lower_triangular = jnp.all(
jnp.reshape(tril == x, x.shape[:-2] + (-1,)), axis=-1
)
positive_diagonal = jnp.all(jnp.diagonal(x, axis1=-2, axis2=-1) > 0, axis=-1)
x_norm = jnp.linalg.norm(x, axis=-1)
unit_norm_row = jnp.all((x_norm <= 1) & (x_norm > 1 - 1e-6), axis=-1)
return lower_triangular & positive_diagonal & unit_norm_row
def feasible_like(self, prototype):
return jax.numpy.broadcast_to(
jax.numpy.eye(prototype.shape[-1]), prototype.shape
)
class _CorrMatrix(Constraint):
event_dim = 2
def __call__(self, x):
jnp = np if isinstance(x, (np.ndarray, np.generic)) else jax.numpy
# check for symmetric
symmetric = jnp.all(jnp.all(x == jnp.swapaxes(x, -2, -1), axis=-1), axis=-1)
# check for the smallest eigenvalue is positive
positive = jnp.linalg.eigh(x)[0][..., 0] > 0
# check for diagonal equal to 1
unit_variance = jnp.all(
jnp.abs(jnp.diagonal(x, axis1=-2, axis2=-1) - 1) < 1e-6, axis=-1
)
return symmetric & positive & unit_variance
def feasible_like(self, prototype):
return jax.numpy.broadcast_to(
jax.numpy.eye(prototype.shape[-1]), prototype.shape
)
class _Dependent(Constraint):
"""
Placeholder for variables whose support depends on other variables.
These variables obey no simple coordinate-wise constraints.
:param bool is_discrete: Optional value of ``.is_discrete`` in case this
can be computed statically. If not provided, access to the
``.is_discrete`` attribute will raise a NotImplementedError.
:param int event_dim: Optional value of ``.event_dim`` in case this can be
computed statically. If not provided, access to the ``.event_dim``
attribute will raise a NotImplementedError.
"""
def __init__(self, *, is_discrete=NotImplemented, event_dim=NotImplemented):
self._is_discrete = is_discrete
self._event_dim = event_dim
super().__init__()
@property
def is_discrete(self):
if self._is_discrete is NotImplemented:
raise NotImplementedError(".is_discrete cannot be determined statically")
return self._is_discrete
@property
def event_dim(self):
if self._event_dim is NotImplemented:
raise NotImplementedError(".event_dim cannot be determined statically")
return self._event_dim
def __call__(self, x=None, *, is_discrete=NotImplemented, event_dim=NotImplemented):
if x is not None:
raise ValueError("Cannot determine validity of dependent constraint")
# Support for syntax to customize static attributes::
# constraints.dependent(is_discrete=True, event_dim=1)
if is_discrete is NotImplemented:
is_discrete = self._is_discrete
if event_dim is NotImplemented:
event_dim = self._event_dim
return _Dependent(is_discrete=is_discrete, event_dim=event_dim)
class dependent_property(property, _Dependent):
def __init__(
self, fn=None, *, is_discrete=NotImplemented, event_dim=NotImplemented
):
super().__init__(fn)
self._is_discrete = is_discrete
self._event_dim = event_dim
def __call__(self, x):
if not callable(x):
return super().__call__(x)
# Support for syntax to customize static attributes::
# @constraints.dependent_property(is_discrete=True, event_dim=1)
# def support(self):
# ...
return dependent_property(
x, is_discrete=self._is_discrete, event_dim=self._event_dim
)
def is_dependent(constraint):
return isinstance(constraint, _Dependent)
class _GreaterThan(Constraint):
def __init__(self, lower_bound):
self.lower_bound = lower_bound
def __call__(self, x):
return x > self.lower_bound
def feasible_like(self, prototype):
return jax.numpy.broadcast_to(self.lower_bound + 1, jax.numpy.shape(prototype))
class _IndependentConstraint(Constraint):
"""
Wraps a constraint by aggregating over ``reinterpreted_batch_ndims``-many
dims in :meth:`check`, so that an event is valid only if all its
independent entries are valid.
"""
def __init__(self, base_constraint, reinterpreted_batch_ndims):
assert isinstance(base_constraint, Constraint)
assert isinstance(reinterpreted_batch_ndims, int)
assert reinterpreted_batch_ndims >= 0
if isinstance(base_constraint, _IndependentConstraint):
reinterpreted_batch_ndims = (
reinterpreted_batch_ndims + base_constraint.reinterpreted_batch_ndims
)
base_constraint = base_constraint.base_constraint
self.base_constraint = base_constraint
self.reinterpreted_batch_ndims = reinterpreted_batch_ndims
super().__init__()
@property
def event_dim(self):
return self.base_constraint.event_dim + self.reinterpreted_batch_ndims
def __call__(self, value):
result = self.base_constraint(value)
if self.reinterpreted_batch_ndims == 0:
return result
elif jax.numpy.ndim(result) < self.reinterpreted_batch_ndims:
expected = self.event_dim
raise ValueError(
f"Expected value.dim() >= {expected} but got {jax.numpy.ndim(value)}"
)
result = result.reshape(
jax.numpy.shape(result)[
: jax.numpy.ndim(result) - self.reinterpreted_batch_ndims
]
+ (-1,)
)
result = result.all(-1)
return result
def feasible_like(self, prototype):
return self.base_constraint.feasible_like(prototype)
class _LessThan(Constraint):
def __init__(self, upper_bound):
self.upper_bound = upper_bound
def __call__(self, x):
return x < self.upper_bound
def feasible_like(self, prototype):
return jax.numpy.broadcast_to(self.upper_bound - 1, jax.numpy.shape(prototype))
class _IntegerInterval(Constraint):
is_discrete = True
def __init__(self, lower_bound, upper_bound):
self.lower_bound = lower_bound
self.upper_bound = upper_bound
def __call__(self, x):
return (x >= self.lower_bound) & (x <= self.upper_bound) & (x % 1 == 0)
def feasible_like(self, prototype):
return jax.numpy.broadcast_to(self.lower_bound, jax.numpy.shape(prototype))
class _IntegerGreaterThan(Constraint):
is_discrete = True
def __init__(self, lower_bound):
self.lower_bound = lower_bound
def __call__(self, x):
return (x % 1 == 0) & (x >= self.lower_bound)
def feasible_like(self, prototype):
return jax.numpy.broadcast_to(self.lower_bound, jax.numpy.shape(prototype))
class _Interval(Constraint):
def __init__(self, lower_bound, upper_bound):
self.lower_bound = lower_bound
self.upper_bound = upper_bound
def __call__(self, x):
return (x >= self.lower_bound) & (x <= self.upper_bound)
def feasible_like(self, prototype):
return jax.numpy.broadcast_to(
(self.lower_bound + self.upper_bound) / 2, jax.numpy.shape(prototype)
)
class _LowerCholesky(Constraint):
event_dim = 2
def __call__(self, x):
jnp = np if isinstance(x, (np.ndarray, np.generic)) else jax.numpy
tril = jnp.tril(x)
lower_triangular = jnp.all(
jnp.reshape(tril == x, x.shape[:-2] + (-1,)), axis=-1
)
positive_diagonal = jnp.all(jnp.diagonal(x, axis1=-2, axis2=-1) > 0, axis=-1)
return lower_triangular & positive_diagonal
def feasible_like(self, prototype):
return jax.numpy.broadcast_to(
jax.numpy.eye(prototype.shape[-1]), prototype.shape
)
class _Multinomial(Constraint):
is_discrete = True
event_dim = 1
def __init__(self, upper_bound):
self.upper_bound = upper_bound
def __call__(self, x):
return (x >= 0).all(axis=-1) & (x.sum(axis=-1) == self.upper_bound)
def feasible_like(self, prototype):
pad_width = ((0, 0),) * jax.numpy.ndim(self.upper_bound) + (
(0, prototype.shape[-1] - 1),
)
value = jax.numpy.pad(jax.numpy.expand_dims(self.upper_bound, -1), pad_width)
return jax.numpy.broadcast_to(value, prototype.shape)
class _OrderedVector(Constraint):
event_dim = 1
def __call__(self, x):
return (x[..., 1:] > x[..., :-1]).all(axis=-1)
def feasible_like(self, prototype):
return jax.numpy.broadcast_to(
jax.numpy.arange(float(prototype.shape[-1])), prototype.shape
)
class _PositiveDefinite(Constraint):
event_dim = 2
def __call__(self, x):
jnp = np if isinstance(x, (np.ndarray, np.generic)) else jax.numpy
# check for symmetric
symmetric = jnp.all(jnp.all(x == jnp.swapaxes(x, -2, -1), axis=-1), axis=-1)
# check for the smallest eigenvalue is positive
positive = jnp.linalg.eigh(x)[0][..., 0] > 0
return symmetric & positive
def feasible_like(self, prototype):
return jax.numpy.broadcast_to(
jax.numpy.eye(prototype.shape[-1]), prototype.shape
)
class _PositiveOrderedVector(Constraint):
"""
Constrains to a positive real-valued tensor where the elements are monotonically
increasing along the `event_shape` dimension.
"""
event_dim = 1
def __call__(self, x):
return ordered_vector.check(x) & independent(positive, 1).check(x)
def feasible_like(self, prototype):
return jax.numpy.broadcast_to(
jax.numpy.exp(jax.numpy.arange(float(prototype.shape[-1]))), prototype.shape
)
class _Real(Constraint):
def __call__(self, x):
        # XXX: consider relaxing this condition to the [-inf, inf] interval
return (x == x) & (x != float("inf")) & (x != float("-inf"))
def feasible_like(self, prototype):
return jax.numpy.zeros_like(prototype)
class _Simplex(Constraint):
event_dim = 1
def __call__(self, x):
x_sum = x.sum(axis=-1)
return (x >= 0).all(axis=-1) & (x_sum < 1 + 1e-6) & (x_sum > 1 - 1e-6)
def feasible_like(self, prototype):
return jax.numpy.full_like(prototype, 1 / prototype.shape[-1])
class _SoftplusPositive(_GreaterThan):
def __init__(self):
super().__init__(lower_bound=0.0)
def feasible_like(self, prototype):
return jax.numpy.full(jax.numpy.shape(prototype), np.log(2))
class _SoftplusLowerCholesky(_LowerCholesky):
def feasible_like(self, prototype):
return jax.numpy.broadcast_to(
jax.numpy.eye(prototype.shape[-1]) * np.log(2), prototype.shape
)
class _Sphere(Constraint):
"""
Constrain to the Euclidean sphere of any dimension.
"""
event_dim = 1
reltol = 10.0 # Relative to finfo.eps.
def __call__(self, x):
jnp = np if isinstance(x, (np.ndarray, np.generic)) else jax.numpy
eps = jnp.finfo(x.dtype).eps
norm = jnp.linalg.norm(x, axis=-1)
error = jnp.abs(norm - 1)
return error < self.reltol * eps * x.shape[-1] ** 0.5
def feasible_like(self, prototype):
return jax.numpy.full_like(prototype, prototype.shape[-1] ** (-0.5))
# TODO: Make types consistent
# See https://github.com/pytorch/pytorch/issues/50616
boolean = _Boolean()
circular = _Interval(-math.pi, math.pi)
corr_cholesky = _CorrCholesky()
corr_matrix = _CorrMatrix()
dependent = _Dependent()
greater_than = _GreaterThan
less_than = _LessThan
independent = _IndependentConstraint
integer_interval = _IntegerInterval
integer_greater_than = _IntegerGreaterThan
interval = _Interval
lower_cholesky = _LowerCholesky()
multinomial = _Multinomial
nonnegative_integer = _IntegerGreaterThan(0)
ordered_vector = _OrderedVector()
positive = _GreaterThan(0.0)
positive_definite = _PositiveDefinite()
positive_integer = _IntegerGreaterThan(1)
positive_ordered_vector = _PositiveOrderedVector()
real = _Real()
real_vector = independent(real, 1)
simplex = _Simplex()
softplus_lower_cholesky = _SoftplusLowerCholesky()
softplus_positive = _SoftplusPositive()
sphere = _Sphere()
unit_interval = _Interval(0.0, 1.0)
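# Usage sketch (values are illustrative): constraints act as predicates over
# arrays, and can produce feasible initial values of a given shape and dtype.
#     import jax.numpy as jnp
#     positive(jnp.array([0.5, -1.0]))      # -> Array([ True, False])
#     interval(0.0, 1.0)(jnp.array(0.3))    # -> Array(True)
#     simplex.feasible_like(jnp.ones(4))    # -> Array([0.25, 0.25, 0.25, 0.25])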
| 32.309278
| 108
| 0.662157
|
495eea9107249c9ef6af0464268cc1e2e9fd6b2f
| 3,272
|
py
|
Python
|
coq/doc/tools/coqrst/repl/ansicolors.py
|
reichel3/TacTok
|
c344e76263de04311af8a0030c07aec95d87f71c
|
[
"MIT"
] | 7
|
2020-11-23T02:45:36.000Z
|
2022-03-18T03:03:33.000Z
|
coq/doc/tools/coqrst/repl/ansicolors.py
|
reichel3/TacTok
|
c344e76263de04311af8a0030c07aec95d87f71c
|
[
"MIT"
] | 4
|
2021-02-23T03:03:51.000Z
|
2021-11-13T00:07:38.000Z
|
coq/doc/tools/coqrst/repl/ansicolors.py
|
reichel3/TacTok
|
c344e76263de04311af8a0030c07aec95d87f71c
|
[
"MIT"
] | 2
|
2021-01-19T17:56:28.000Z
|
2022-03-28T04:39:41.000Z
|
##########################################################################
## # The Coq Proof Assistant / The Coq Development Team ##
## v # INRIA, CNRS and contributors - Copyright 1999-2018 ##
## <O___,, # (see CREDITS file for the list of authors) ##
## \VV/ ###############################################################
## // # This file is distributed under the terms of the ##
## # GNU Lesser General Public License Version 2.1 ##
## # (see LICENSE file for the text of the license) ##
##########################################################################
"""
Parse Coq's ANSI output.
========================
Translated to Python from Coq's terminal.ml.
"""
# pylint: disable=too-many-return-statements, too-many-branches
def parse_color(style, offset):
color = style[offset] % 10
if color == 0:
return ("black", 1)
elif color == 1:
return ("red", 1)
elif color == 2:
return ("green", 1)
elif color == 3:
return ("yellow", 1)
elif color == 4:
return ("blue", 1)
elif color == 5:
return ("magenta", 1)
elif color == 6:
return ("cyan", 1)
elif color == 7:
return ("white", 1)
elif color == 9:
return ("default", 1)
    elif color == 8:
        nxt = style[offset + 1]
        if nxt == 5:
            # 38;5;<n> selects palette index <n>, consuming 3 codes in total
            return ("index-{}".format(style[offset + 2]), 3)
        elif nxt == 2:
            # 38;2;<r>;<g>;<b> selects a 24-bit color, consuming 5 codes
            return ("rgb-{}-{}-{}".format(*style[offset+2:offset+5]), 5)
        else:
            raise ValueError("{}, {}".format(style, offset))
else:
raise ValueError()
def parse_style(style, offset, acc):
    offset = 0  # note: the incoming `offset` argument is ignored
while offset < len(style):
head = style[offset]
if head == 0:
acc.append("reset")
elif head == 1:
acc.append("bold")
elif head == 3:
acc.append("italic")
elif head == 4:
acc.append("underline")
elif head == 7:
acc.append("negative")
elif head == 22:
acc.append("no-bold")
elif head == 23:
acc.append("no-italic")
elif head == 24:
acc.append("no-underline")
elif head == 27:
acc.append("no-negative")
else:
color, suboffset = parse_color(style, offset)
offset += suboffset - 1
if 30 <= head < 40:
acc.append("fg-{}".format(color))
elif 40 <= head < 50:
acc.append("bg-{}".format(color))
elif 90 <= head < 100:
acc.append("fg-light-{}".format(color))
elif 100 <= head < 110:
acc.append("bg-light-{}".format(color))
offset += 1
def parse_ansi(code):
"""Parse an ansi code into a collection of CSS classes.
:param code: A sequence of ‘;’-separated ANSI codes. Do not include the
leading ‘^[[’ or the final ‘m’
"""
classes = []
parse_style([int(c) for c in code.split(';')], 0, classes)
return ["ansi-" + cls for cls in classes]
if __name__ == '__main__':
# As produced by Coq with ‘Check nat.’
print(parse_ansi("92;49;22;23;24;27"))
| 32.72
| 76
| 0.466076
|
6ffd004231b0151c9a0bb874cee8a81b07ecb9f9
| 694
|
py
|
Python
|
setup.py
|
monosloth/console
|
a47e1479320a18a4b5716e87ee275985ebd5825f
|
[
"MIT"
] | null | null | null |
setup.py
|
monosloth/console
|
a47e1479320a18a4b5716e87ee275985ebd5825f
|
[
"MIT"
] | null | null | null |
setup.py
|
monosloth/console
|
a47e1479320a18a4b5716e87ee275985ebd5825f
|
[
"MIT"
] | null | null | null |
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="monosloth-console",
version="0.0.1",
license='MIT',
author="monosloth",
author_email="admin@monosloth.com",
description="A command line utility for monosloth",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/monosloth/console",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3.7",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
install_requires=[
'pyyaml'
],
)
| 26.692308
| 55
| 0.652738
|
9cf2b7b7c2cac54ab9a1ab2cfa036e14372d0093
| 4,950
|
py
|
Python
|
MachineLearningPyFiles_DataScience/demo17_classificationcancer.py
|
mahnooranjum/Programming_DataScience
|
f7a4215d4615b3f8460c3a1944a585628cf6930d
|
[
"MIT"
] | null | null | null |
MachineLearningPyFiles_DataScience/demo17_classificationcancer.py
|
mahnooranjum/Programming_DataScience
|
f7a4215d4615b3f8460c3a1944a585628cf6930d
|
[
"MIT"
] | null | null | null |
MachineLearningPyFiles_DataScience/demo17_classificationcancer.py
|
mahnooranjum/Programming_DataScience
|
f7a4215d4615b3f8460c3a1944a585628cf6930d
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Visualising the results
def plot_model(classifier, X_set, y_set, y_test, y_pred, text):
from sklearn.metrics import accuracy_score
print("===== Accuracy Score =====")
print(accuracy_score(y_test, y_pred))
from sklearn.metrics import classification_report
print("===== Accuracy Score =====")
class_report = classification_report(y_test, y_pred)
print(class_report)
from matplotlib.colors import ListedColormap
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('pink', 'cyan')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('red', 'blue'))(i), label = j)
plt.title(text)
plt.xlabel('X')
plt.ylabel('y')
plt.legend()
plt.show()
"""## Get Breast Cancer Dataset"""
from sklearn.datasets import load_breast_cancer
data = load_breast_cancer()
data.keys()
X = data.data
y = data.target
# TRAIN TEST SPLIT
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.1)
# Applying PCA
from sklearn.decomposition import PCA
pca = PCA(n_components = 2)
X_train = pca.fit_transform(X_train)
X_test = pca.transform(X_test)
pca.explained_variance_ratio_
# Feature Scaling
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression()
classifier.fit(X_train,y_train)
y_pred = classifier.predict(X_test)
y_pred = np.round(y_pred).flatten()
plot_model(classifier, X_train, y_train, y_test, y_pred, "Logistic Regression")
from sklearn.linear_model import RidgeClassifierCV
classifier = RidgeClassifierCV()
classifier.fit(X_train,y_train)
y_pred = classifier.predict(X_test)
y_pred = np.round(y_pred).flatten()
plot_model(classifier, X_train, y_train, y_test, y_pred, "RidgeClassifierCV")
from sklearn.svm import SVC
classifier = SVC()
classifier.fit(X_train,y_train)
y_pred = classifier.predict(X_test)
y_pred = np.round(y_pred).flatten()
plot_model(classifier, X_train, y_train, y_test, y_pred, "SVC")
from sklearn.neural_network import MLPClassifier
classifier = MLPClassifier()
classifier.fit(X_train,y_train)
y_pred = classifier.predict(X_test)
y_pred = np.round(y_pred).flatten()
plot_model(classifier, X_train, y_train, y_test, y_pred, "SVC")
from sklearn.svm import LinearSVC
classifier = LinearSVC()
classifier.fit(X_train,y_train)
y_pred = classifier.predict(X_test)
y_pred = np.round(y_pred).flatten()
plot_model(classifier, X_train, y_train, y_test, y_pred, "LinearSVC")
from sklearn.ensemble import RandomForestClassifier
classifier = RandomForestClassifier()
classifier.fit(X_train,y_train)
y_pred = classifier.predict(X_test)
y_pred = np.round(y_pred).flatten()
plot_model(classifier, X_train, y_train, y_test, y_pred, "RandomForestClassifier")
from sklearn.tree import DecisionTreeClassifier
classifier = DecisionTreeClassifier()
classifier.fit(X_train,y_train)
y_pred = classifier.predict(X_test)
y_pred = np.round(y_pred).flatten()
plot_model(classifier, X_train, y_train, y_test, y_pred, "DecisionTreeClassifier")
from sklearn.ensemble import GradientBoostingClassifier
classifier = GradientBoostingClassifier()
classifier.fit(X_train,y_train)
y_pred = classifier.predict(X_test)
y_pred = np.round(y_pred).flatten()
plot_model(classifier, X_train, y_train, y_test, y_pred, "GradientBoostingClassifier")
from sklearn.linear_model import SGDClassifier
classifier = SGDClassifier()
classifier.fit(X_train,y_train)
y_pred = classifier.predict(X_test)
y_pred = np.round(y_pred).flatten()
plot_model(classifier, X_train, y_train, y_test, y_pred, "SGDClassifier")
from sklearn.linear_model import Perceptron
classifier = Perceptron()
classifier.fit(X_train,y_train)
y_pred = classifier.predict(X_test)
y_pred = np.round(y_pred).flatten()
plot_model(classifier, X_train, y_train, y_test, y_pred, "Perceptron")
from sklearn.naive_bayes import GaussianNB
classifier = GaussianNB()
classifier.fit(X_train,y_train)
y_pred = classifier.predict(X_test)
y_pred = np.round(y_pred).flatten()
plot_model(classifier, X_train, y_train, y_test, y_pred, "GaussianNB")
from sklearn.neighbors import KNeighborsClassifier
classifier = KNeighborsClassifier()
classifier.fit(X_train,y_train)
y_pred = classifier.predict(X_test)
y_pred = np.round(y_pred).flatten()
plot_model(classifier, X_train, y_train, y_test, y_pred, "KNeighborsClassifier")
| 34.615385
| 107
| 0.764444
|
93f55ff0315d287f22c31a5011458da16ad10f21
| 34,826
|
py
|
Python
|
common/middleware/tempauth.py
|
escudocloud/encswift_server
|
90a5a999c06b40da89d5f785795f80bd9d990640
|
[
"Apache-2.0"
] | null | null | null |
common/middleware/tempauth.py
|
escudocloud/encswift_server
|
90a5a999c06b40da89d5f785795f80bd9d990640
|
[
"Apache-2.0"
] | null | null | null |
common/middleware/tempauth.py
|
escudocloud/encswift_server
|
90a5a999c06b40da89d5f785795f80bd9d990640
|
[
"Apache-2.0"
] | 1
|
2016-09-27T09:08:24.000Z
|
2016-09-27T09:08:24.000Z
|
# Copyright (c) 2011-2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from time import time
from traceback import format_exc
from urllib import unquote
from uuid import uuid4
from hashlib import sha1
import hmac
import base64
from eventlet import Timeout
from swift.common.swob import Response, Request
from swift.common.swob import HTTPBadRequest, HTTPForbidden, HTTPNotFound, \
HTTPUnauthorized
from swift.common.request_helpers import get_sys_meta_prefix
from swift.common.middleware.acl import (
clean_acl, parse_acl, referrer_allowed, acls_from_account_info)
from swift.common.utils import cache_from_env, get_logger, \
split_path, config_true_value, register_swift_info
from swift.common.utils import config_read_reseller_options
from swift.proxy.controllers.base import get_account_info
class TempAuth(object):
"""
Test authentication and authorization system.
Add to your pipeline in proxy-server.conf, such as::
[pipeline:main]
pipeline = catch_errors cache tempauth proxy-server
Set account auto creation to true in proxy-server.conf::
[app:proxy-server]
account_autocreate = true
And add a tempauth filter section, such as::
[filter:tempauth]
use = egg:swift#tempauth
user_admin_admin = admin .admin .reseller_admin
user_test_tester = testing .admin
user_test2_tester2 = testing2 .admin
user_test_tester3 = testing3
# To allow accounts/users with underscores you can base64 encode them.
# Here is the account "under_score" and username "a_b" (note the lack
# of padding equal signs):
user64_dW5kZXJfc2NvcmU_YV9i = testing4
See the proxy-server.conf-sample for more information.
Multiple Reseller Prefix Items:
The reseller prefix specifies which parts of the account namespace this
middleware is responsible for managing authentication and authorization.
By default, the prefix is AUTH so accounts and tokens are prefixed
by AUTH_. When a request's token and/or path start with AUTH_, this
middleware knows it is responsible.
We allow the reseller prefix to be a list. In tempauth, the first item
in the list is used as the prefix for tokens and user groups. The
    other prefixes provide alternate accounts that users can access. For
example if the reseller prefix list is 'AUTH, OTHER', a user with
admin access to AUTH_account also has admin access to
OTHER_account.
Required Group:
The group .admin is normally needed to access an account (ACLs provide
an additional way to access an account). You can specify the
``require_group`` parameter. This means that you also need the named group
to access an account. If you have several reseller prefix items, prefix
the ``require_group`` parameter with the appropriate prefix.
X-Service-Token:
If an X-Service-Token is presented in the request headers, the groups
    derived from the token are appended to the roles derived from
X-Auth-Token. If X-Auth-Token is missing or invalid, X-Service-Token
is not processed.
The X-Service-Token is useful when combined with multiple reseller prefix
items. In the following configuration, accounts prefixed SERVICE_
    are only accessible if X-Auth-Token is from the end-user and
X-Service-Token is from the ``glance`` user::
[filter:tempauth]
use = egg:swift#tempauth
reseller_prefix = AUTH, SERVICE
SERVICE_require_group = .service
user_admin_admin = admin .admin .reseller_admin
user_joeacct_joe = joepw .admin
user_maryacct_mary = marypw .admin
user_glance_glance = glancepw .service
The name .service is an example. Unlike .admin and .reseller_admin
it is not a reserved name.
Account ACLs:
If a swift_owner issues a POST or PUT to the account, with the
X-Account-Access-Control header set in the request, then this may
allow certain types of access for additional users.
* Read-Only: Users with read-only access can list containers in the
account, list objects in any container, retrieve objects, and view
unprivileged account/container/object metadata.
* Read-Write: Users with read-write access can (in addition to the
read-only privileges) create objects, overwrite existing objects,
create new containers, and set unprivileged container/object
metadata.
* Admin: Users with admin access are swift_owners and can perform
any action, including viewing/setting privileged metadata (e.g.
changing account ACLs).
To generate headers for setting an account ACL::
from swift.common.middleware.acl import format_acl
acl_data = { 'admin': ['alice'], 'read-write': ['bob', 'carol'] }
header_value = format_acl(version=2, acl_dict=acl_data)
To generate a curl command line from the above::
token=...
storage_url=...
python -c '
from swift.common.middleware.acl import format_acl
acl_data = { 'admin': ['alice'], 'read-write': ['bob', 'carol'] }
headers = {'X-Account-Access-Control':
format_acl(version=2, acl_dict=acl_data)}
header_str = ' '.join(["-H '%s: %s'" % (k, v)
for k, v in headers.items()])
print ('curl -D- -X POST -H "x-auth-token: $token" %s '
'$storage_url' % header_str)
'
:param app: The next WSGI app in the pipeline
:param conf: The dict of configuration values from the Paste config file
"""
def __init__(self, app, conf):
self.app = app
self.conf = conf
self.logger = get_logger(conf, log_route='tempauth')
self.log_headers = config_true_value(conf.get('log_headers', 'f'))
self.reseller_prefixes, self.account_rules = \
config_read_reseller_options(conf, dict(require_group=''))
self.reseller_prefix = self.reseller_prefixes[0]
self.logger.set_statsd_prefix('tempauth.%s' % (
self.reseller_prefix if self.reseller_prefix else 'NONE',))
self.auth_prefix = conf.get('auth_prefix', '/auth/')
if not self.auth_prefix or not self.auth_prefix.strip('/'):
self.logger.warning('Rewriting invalid auth prefix "%s" to '
'"/auth/" (Non-empty auth prefix path '
'is required)' % self.auth_prefix)
self.auth_prefix = '/auth/'
if self.auth_prefix[0] != '/':
self.auth_prefix = '/' + self.auth_prefix
if self.auth_prefix[-1] != '/':
self.auth_prefix += '/'
self.token_life = int(conf.get('token_life', 86400))
self.allow_overrides = config_true_value(
conf.get('allow_overrides', 't'))
self.storage_url_scheme = conf.get('storage_url_scheme', 'default')
self.users = {}
for conf_key in conf:
if conf_key.startswith('user_') or conf_key.startswith('user64_'):
account, username = conf_key.split('_', 1)[1].split('_')
if conf_key.startswith('user64_'):
# Because trailing equal signs would screw up config file
# parsing, we auto-pad with '=' chars.
account += '=' * (len(account) % 4)
account = base64.b64decode(account)
username += '=' * (len(username) % 4)
username = base64.b64decode(username)
values = conf[conf_key].split()
if not values:
raise ValueError('%s has no key set' % conf_key)
key = values.pop(0)
if values and ('://' in values[-1] or '$HOST' in values[-1]):
url = values.pop()
else:
url = '$HOST/v1/%s%s' % (self.reseller_prefix, account)
self.users[account + ':' + username] = {
'key': key, 'url': url, 'groups': values}
def __call__(self, env, start_response):
"""
Accepts a standard WSGI application call, authenticating the request
and installing callback hooks for authorization and ACL header
validation. For an authenticated request, REMOTE_USER will be set to a
comma separated list of the user's groups.
With a non-empty reseller prefix, acts as the definitive auth service
for just tokens and accounts that begin with that prefix, but will deny
requests outside this prefix if no other auth middleware overrides it.
With an empty reseller prefix, acts as the definitive auth service only
for tokens that validate to a non-empty set of groups. For all other
requests, acts as the fallback auth service when no other auth
middleware overrides it.
Alternatively, if the request matches the self.auth_prefix, the request
will be routed through the internal auth request handler (self.handle).
This is to handle granting tokens, etc.
"""
if self.allow_overrides and env.get('swift.authorize_override', False):
return self.app(env, start_response)
if env.get('PATH_INFO', '').startswith(self.auth_prefix):
return self.handle(env, start_response)
s3 = env.get('HTTP_AUTHORIZATION')
token = env.get('HTTP_X_AUTH_TOKEN', env.get('HTTP_X_STORAGE_TOKEN'))
service_token = env.get('HTTP_X_SERVICE_TOKEN')
if s3 or (token and token.startswith(self.reseller_prefix)):
# Note: Empty reseller_prefix will match all tokens.
groups = self.get_groups(env, token)
if service_token:
service_groups = self.get_groups(env, service_token)
if groups and service_groups:
groups += ',' + service_groups
if groups:
user = groups and groups.split(',', 1)[0] or ''
trans_id = env.get('swift.trans_id')
self.logger.debug('User: %s uses token %s (trans_id %s)' %
(user, 's3' if s3 else token, trans_id))
env['REMOTE_USER'] = groups
env['swift.authorize'] = self.authorize
env['swift.clean_acl'] = clean_acl
if '.reseller_admin' in groups:
env['reseller_request'] = True
else:
# Unauthorized token
if self.reseller_prefix and not s3:
# Because I know I'm the definitive auth for this token, I
# can deny it outright.
self.logger.increment('unauthorized')
try:
vrs, realm, rest = split_path(env['PATH_INFO'],
2, 3, True)
except ValueError:
realm = 'unknown'
return HTTPUnauthorized(headers={
'Www-Authenticate': 'Swift realm="%s"' % realm})(
env, start_response)
# Because I'm not certain if I'm the definitive auth for empty
# reseller_prefixed tokens, I won't overwrite swift.authorize.
elif 'swift.authorize' not in env:
env['swift.authorize'] = self.denied_response
else:
if self._is_definitive_auth(env.get('PATH_INFO', '')):
# Handle anonymous access to accounts I'm the definitive
# auth for.
env['swift.authorize'] = self.authorize
env['swift.clean_acl'] = clean_acl
elif self.reseller_prefix == '':
# Because I'm not certain if I'm the definitive auth, I won't
# overwrite swift.authorize.
if 'swift.authorize' not in env:
env['swift.authorize'] = self.authorize
env['swift.clean_acl'] = clean_acl
else:
# Not my token, not my account, I can't authorize this request,
# deny all is a good idea if not already set...
if 'swift.authorize' not in env:
env['swift.authorize'] = self.denied_response
return self.app(env, start_response)
def _is_definitive_auth(self, path):
"""
Determine if we are the definitive auth
Determines if we are the definitive auth for a given path.
If the account name is prefixed with something matching one
of the reseller_prefix items, then we are the auth (return True)
Non-matching: we are not the auth.
However, one of the reseller_prefix items can be blank. If
so, we cannot always be definite so return False.
:param path: A path (e.g., /v1/AUTH_joesaccount/c/o)
        :return: True if we are the definitive auth
"""
try:
version, account, rest = split_path(path, 1, 3, True)
except ValueError:
return False
if account:
return bool(self._get_account_prefix(account))
return False
def _non_empty_reseller_prefixes(self):
return iter([pre for pre in self.reseller_prefixes if pre != ''])
def _get_account_prefix(self, account):
"""
Get the prefix of an account
Determines which reseller prefix matches the account and returns
that prefix. If account does not start with one of the known
reseller prefixes, returns None.
:param account: Account name (e.g., AUTH_joesaccount) or None
:return: The prefix string (examples: 'AUTH_', 'SERVICE_', '')
If we can't match the prefix of the account, return None
"""
if account is None:
return None
# Empty prefix matches everything, so try to match others first
for prefix in self._non_empty_reseller_prefixes():
if account.startswith(prefix):
return prefix
if '' in self.reseller_prefixes:
return ''
return None
def _dot_account(self, account):
"""
Detect if account starts with dot character after the prefix
:param account: account in path (e.g., AUTH_joesaccount)
        :return: True if the account name starts with a dot character
"""
prefix = self._get_account_prefix(account)
return prefix is not None and account[len(prefix)] == '.'
def _get_user_groups(self, account, account_user, account_id):
"""
:param account: example: test
:param account_user: example: test:tester
:param account_id: example: AUTH_test
:return: a comma separated string of group names. The group names are
as follows: account,account_user,groups...
If .admin is in the groups, this is replaced by all the
possible account ids. For example, for user joe, account acct
and resellers AUTH_, OTHER_, the returned string is as
follows: acct,acct:joe,AUTH_acct,OTHER_acct
"""
groups = [account, account_user]
groups.extend(self.users[account_user]['groups'])
if '.admin' in groups:
groups.remove('.admin')
for prefix in self._non_empty_reseller_prefixes():
groups.append('%s%s' % (prefix, account))
if account_id not in groups:
groups.append(account_id)
groups = ','.join(groups)
return groups
def get_groups(self, env, token):
"""
Get groups for the given token.
:param env: The current WSGI environment dictionary.
:param token: Token to validate and return a group string for.
:returns: None if the token is invalid or a string containing a comma
separated list of groups the authenticated user is a member
of. The first group in the list is also considered a unique
identifier for that user.
"""
groups = None
memcache_client = cache_from_env(env)
if not memcache_client:
raise Exception('Memcache required')
memcache_token_key = '%s/token/%s' % (self.reseller_prefix, token)
cached_auth_data = memcache_client.get(memcache_token_key)
if cached_auth_data:
expires, groups = cached_auth_data
if expires < time():
groups = None
if env.get('HTTP_AUTHORIZATION'):
account_user, sign = \
env['HTTP_AUTHORIZATION'].split(' ')[1].rsplit(':', 1)
if account_user not in self.users:
return None
account, user = account_user.split(':', 1)
account_id = self.users[account_user]['url'].rsplit('/', 1)[-1]
path = env['PATH_INFO']
env['PATH_INFO'] = path.replace(account_user, account_id, 1)
msg = base64.urlsafe_b64decode(unquote(token))
key = self.users[account_user]['key']
s = base64.encodestring(hmac.new(key, msg, sha1).digest()).strip()
if s != sign:
return None
groups = self._get_user_groups(account, account_user, account_id)
return groups
def account_acls(self, req):
"""
Return a dict of ACL data from the account server via get_account_info.
Auth systems may define their own format, serialization, structure,
and capabilities implemented in the ACL headers and persisted in the
sysmeta data. However, auth systems are strongly encouraged to be
interoperable with Tempauth.
Account ACLs are set and retrieved via the header
X-Account-Access-Control
For header format and syntax, see:
* :func:`swift.common.middleware.acl.parse_acl()`
* :func:`swift.common.middleware.acl.format_acl()`
"""
info = get_account_info(req.environ, self.app, swift_source='TA')
try:
acls = acls_from_account_info(info)
except ValueError as e1:
self.logger.warn("Invalid ACL stored in metadata: %r" % e1)
return None
except NotImplementedError as e2:
self.logger.warn("ACL version exceeds middleware version: %r" % e2)
return None
return acls
def extract_acl_and_report_errors(self, req):
"""
Return a user-readable string indicating the errors in the input ACL,
or None if there are no errors.
"""
acl_header = 'x-account-access-control'
acl_data = req.headers.get(acl_header)
result = parse_acl(version=2, data=acl_data)
if result is None:
return 'Syntax error in input (%r)' % acl_data
tempauth_acl_keys = 'admin read-write read-only'.split()
for key in result:
# While it is possible to construct auth systems that collaborate
# on ACLs, TempAuth is not such an auth system. At this point,
# it thinks it is authoritative.
if key not in tempauth_acl_keys:
return 'Key %r not recognized' % key
for key in tempauth_acl_keys:
if key not in result:
continue
if not isinstance(result[key], list):
return 'Value for key %r must be a list' % key
for grantee in result[key]:
if not isinstance(grantee, str):
return 'Elements of %r list must be strings' % key
# Everything looks fine, no errors found
internal_hdr = get_sys_meta_prefix('account') + 'core-access-control'
req.headers[internal_hdr] = req.headers.pop(acl_header)
return None
def authorize(self, req):
"""
Returns None if the request is authorized to continue or a standard
WSGI response callable if not.
"""
try:
_junk, account, container, obj = req.split_path(1, 4, True)
except ValueError:
self.logger.increment('errors')
return HTTPNotFound(request=req)
if self._get_account_prefix(account) is None:
self.logger.debug("Account name: %s doesn't start with "
"reseller_prefix(s): %s."
% (account, ','.join(self.reseller_prefixes)))
return self.denied_response(req)
# At this point, TempAuth is convinced that it is authoritative.
# If you are sending an ACL header, it must be syntactically valid
# according to TempAuth's rules for ACL syntax.
acl_data = req.headers.get('x-account-access-control')
if acl_data is not None:
error = self.extract_acl_and_report_errors(req)
if error:
msg = 'X-Account-Access-Control invalid: %s\n\nInput: %s\n' % (
error, acl_data)
headers = [('Content-Type', 'text/plain; charset=UTF-8')]
return HTTPBadRequest(request=req, headers=headers, body=msg)
user_groups = (req.remote_user or '').split(',')
account_user = user_groups[1] if len(user_groups) > 1 else None
if '.reseller_admin' in user_groups and \
account not in self.reseller_prefixes and \
not self._dot_account(account):
req.environ['swift_owner'] = True
self.logger.debug("User %s has reseller admin authorizing."
% account_user)
return None
if account in user_groups and \
(req.method not in ('DELETE', 'PUT') or container):
# The user is admin for the account and is not trying to do an
# account DELETE or PUT
account_prefix = self._get_account_prefix(account)
require_group = self.account_rules.get(account_prefix).get(
'require_group')
if require_group and require_group in user_groups:
req.environ['swift_owner'] = True
self.logger.debug("User %s has admin and %s group."
" Authorizing." % (account_user,
require_group))
return None
elif not require_group:
req.environ['swift_owner'] = True
self.logger.debug("User %s has admin authorizing."
% account_user)
return None
if (req.environ.get('swift_sync_key')
and (req.environ['swift_sync_key'] ==
req.headers.get('x-container-sync-key', None))
and 'x-timestamp' in req.headers):
self.logger.debug("Allow request with container sync-key: %s."
% req.environ['swift_sync_key'])
return None
if req.method == 'OPTIONS':
            # allow OPTIONS requests to proceed as normal
self.logger.debug("Allow OPTIONS request.")
return None
referrers, groups = parse_acl(getattr(req, 'acl', None))
if referrer_allowed(req.referer, referrers):
if obj or '.rlistings' in groups:
self.logger.debug("Allow authorizing %s via referer ACL."
% req.referer)
return None
for user_group in user_groups:
if user_group in groups:
self.logger.debug("User %s allowed in ACL: %s authorizing."
% (account_user, user_group))
return None
# Check for access via X-Account-Access-Control
acct_acls = self.account_acls(req)
if acct_acls:
# At least one account ACL is set in this account's sysmeta data,
# so we should see whether this user is authorized by the ACLs.
user_group_set = set(user_groups)
if user_group_set.intersection(acct_acls['admin']):
req.environ['swift_owner'] = True
self.logger.debug('User %s allowed by X-Account-Access-Control'
' (admin)' % account_user)
return None
if (user_group_set.intersection(acct_acls['read-write']) and
(container or req.method in ('GET', 'HEAD'))):
# The RW ACL allows all operations to containers/objects, but
# only GET/HEAD to accounts (and OPTIONS, above)
self.logger.debug('User %s allowed by X-Account-Access-Control'
' (read-write)' % account_user)
return None
if (user_group_set.intersection(acct_acls['read-only']) and
req.method in ('GET', 'HEAD')):
self.logger.debug('User %s allowed by X-Account-Access-Control'
' (read-only)' % account_user)
return None
return self.denied_response(req)
def denied_response(self, req):
"""
Returns a standard WSGI response callable with the status of 403 or 401
depending on whether the REMOTE_USER is set or not.
"""
if req.remote_user:
self.logger.increment('forbidden')
return HTTPForbidden(request=req)
else:
self.logger.increment('unauthorized')
return HTTPUnauthorized(request=req)
def handle(self, env, start_response):
"""
WSGI entry point for auth requests (ones that match the
self.auth_prefix).
Wraps env in swob.Request object and passes it down.
:param env: WSGI environment dictionary
:param start_response: WSGI callable
"""
try:
req = Request(env)
if self.auth_prefix:
req.path_info_pop()
req.bytes_transferred = '-'
req.client_disconnect = False
if 'x-storage-token' in req.headers and \
'x-auth-token' not in req.headers:
req.headers['x-auth-token'] = req.headers['x-storage-token']
return self.handle_request(req)(env, start_response)
except (Exception, Timeout):
print("EXCEPTION IN handle: %s: %s" % (format_exc(), env))
self.logger.increment('errors')
start_response('500 Server Error',
[('Content-Type', 'text/plain')])
return ['Internal server error.\n']
def handle_request(self, req):
"""
Entry point for auth requests (ones that match the self.auth_prefix).
Should return a WSGI-style callable (such as swob.Response).
:param req: swob.Request object
"""
req.start_time = time()
handler = None
try:
version, account, user, _junk = req.split_path(1, 4, True)
except ValueError:
self.logger.increment('errors')
return HTTPNotFound(request=req)
if version in ('v1', 'v1.0', 'auth'):
if req.method == 'GET':
handler = self.handle_get_token
if not handler:
self.logger.increment('errors')
req.response = HTTPBadRequest(request=req)
else:
req.response = handler(req)
return req.response
def handle_get_token(self, req):
"""
Handles the various `request for token and service end point(s)` calls.
There are various formats to support the various auth servers in the
past. Examples::
GET <auth-prefix>/v1/<act>/auth
X-Auth-User: <act>:<usr> or X-Storage-User: <usr>
X-Auth-Key: <key> or X-Storage-Pass: <key>
GET <auth-prefix>/auth
X-Auth-User: <act>:<usr> or X-Storage-User: <act>:<usr>
X-Auth-Key: <key> or X-Storage-Pass: <key>
GET <auth-prefix>/v1.0
X-Auth-User: <act>:<usr> or X-Storage-User: <act>:<usr>
X-Auth-Key: <key> or X-Storage-Pass: <key>
On successful authentication, the response will have X-Auth-Token and
X-Storage-Token set to the token to use with Swift and X-Storage-URL
set to the URL to the default Swift cluster to use.
:param req: The swob.Request to process.
:returns: swob.Response, 2xx on success with data set as explained
above.
"""
# Validate the request info
try:
pathsegs = split_path(req.path_info, 1, 3, True)
except ValueError:
self.logger.increment('errors')
return HTTPNotFound(request=req)
if pathsegs[0] == 'v1' and pathsegs[2] == 'auth':
account = pathsegs[1]
user = req.headers.get('x-storage-user')
if not user:
user = req.headers.get('x-auth-user')
if not user or ':' not in user:
self.logger.increment('token_denied')
return HTTPUnauthorized(request=req, headers=
{'Www-Authenticate':
'Swift realm="%s"' % account})
account2, user = user.split(':', 1)
if account != account2:
self.logger.increment('token_denied')
return HTTPUnauthorized(request=req, headers=
{'Www-Authenticate':
'Swift realm="%s"' % account})
key = req.headers.get('x-storage-pass')
if not key:
key = req.headers.get('x-auth-key')
elif pathsegs[0] in ('auth', 'v1.0'):
user = req.headers.get('x-auth-user')
if not user:
user = req.headers.get('x-storage-user')
if not user or ':' not in user:
self.logger.increment('token_denied')
return HTTPUnauthorized(request=req, headers=
{'Www-Authenticate':
'Swift realm="unknown"'})
account, user = user.split(':', 1)
key = req.headers.get('x-auth-key')
if not key:
key = req.headers.get('x-storage-pass')
else:
return HTTPBadRequest(request=req)
if not all((account, user, key)):
self.logger.increment('token_denied')
realm = account or 'unknown'
return HTTPUnauthorized(request=req, headers={'Www-Authenticate':
'Swift realm="%s"' %
realm})
# Authenticate user
account_user = account + ':' + user
if account_user not in self.users:
self.logger.increment('token_denied')
return HTTPUnauthorized(request=req, headers=
{'Www-Authenticate':
'Swift realm="%s"' % account})
if self.users[account_user]['key'] != key:
self.logger.increment('token_denied')
return HTTPUnauthorized(request=req, headers=
{'Www-Authenticate':
'Swift realm="unknown"'})
account_id = self.users[account_user]['url'].rsplit('/', 1)[-1]
# Get memcache client
memcache_client = cache_from_env(req.environ)
if not memcache_client:
raise Exception('Memcache required')
# See if a token already exists and hasn't expired
token = None
memcache_user_key = '%s/user/%s' % (self.reseller_prefix, account_user)
candidate_token = memcache_client.get(memcache_user_key)
if candidate_token:
memcache_token_key = \
'%s/token/%s' % (self.reseller_prefix, candidate_token)
cached_auth_data = memcache_client.get(memcache_token_key)
if cached_auth_data:
expires, old_groups = cached_auth_data
old_groups = old_groups.split(',')
new_groups = self._get_user_groups(account, account_user,
account_id)
if expires > time() and \
set(old_groups) == set(new_groups.split(',')):
token = candidate_token
# Create a new token if one didn't exist
if not token:
# Generate new token
token = '%stk%s' % (self.reseller_prefix, uuid4().hex)
expires = time() + self.token_life
groups = self._get_user_groups(account, account_user, account_id)
# Save token
memcache_token_key = '%s/token/%s' % (self.reseller_prefix, token)
memcache_client.set(memcache_token_key, (expires, groups),
time=float(expires - time()))
# Record the token with the user info for future use.
memcache_user_key = \
'%s/user/%s' % (self.reseller_prefix, account_user)
memcache_client.set(memcache_user_key, token,
time=float(expires - time()))
resp = Response(request=req, headers={
'x-auth-token': token, 'x-storage-token': token})
url = self.users[account_user]['url'].replace('$HOST', resp.host_url)
if self.storage_url_scheme != 'default':
url = self.storage_url_scheme + ':' + url.split(':', 1)[1]
resp.headers['x-storage-url'] = url
return resp
def filter_factory(global_conf, **local_conf):
"""Returns a WSGI filter app for use with paste.deploy."""
conf = global_conf.copy()
conf.update(local_conf)
register_swift_info('tempauth', account_acls=True)
def auth_filter(app):
return TempAuth(app, conf)
return auth_filter
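# Request sketch (assumes the sample `user_test_tester = testing .admin` entry
# from the docstring above and a proxy listening on localhost:8080):
#     curl -v -H 'X-Auth-User: test:tester' -H 'X-Auth-Key: testing' \
#         http://localhost:8080/auth/v1.0
# A 2xx response carries X-Auth-Token, X-Storage-Token and X-Storage-URL.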
| 44.821107
| 79
| 0.585367
|
8d242c70b381c8c11b6335ea29fb4baa55404f4e
| 82
|
py
|
Python
|
exercicios/PythonExercicios/ex002.py
|
Roberto-Sartore/Python
|
98f91f13cf78d761893c4a1f3264ed999244d32b
|
[
"MIT"
] | null | null | null |
exercicios/PythonExercicios/ex002.py
|
Roberto-Sartore/Python
|
98f91f13cf78d761893c4a1f3264ed999244d32b
|
[
"MIT"
] | null | null | null |
exercicios/PythonExercicios/ex002.py
|
Roberto-Sartore/Python
|
98f91f13cf78d761893c4a1f3264ed999244d32b
|
[
"MIT"
] | null | null | null |
nome = input('What is your name? ')
print('Hello', nome, '. Nice to meet you!')
| 27.333333
| 46
| 0.634146
|
1566a8f3245e63b1af574332b7a46f6698728784
| 6,733
|
py
|
Python
|
templates/management-api/authenticator.py
|
pawankaushal/crossbar-examples
|
b6e0cc321bad020045c4fafec091f78abd938618
|
[
"Apache-2.0"
] | null | null | null |
templates/management-api/authenticator.py
|
pawankaushal/crossbar-examples
|
b6e0cc321bad020045c4fafec091f78abd938618
|
[
"Apache-2.0"
] | null | null | null |
templates/management-api/authenticator.py
|
pawankaushal/crossbar-examples
|
b6e0cc321bad020045c4fafec091f78abd938618
|
[
"Apache-2.0"
] | null | null | null |
###############################################################################
##
## Copyright (C) Tavendo GmbH and/or collaborators. All rights reserved.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## 1. Redistributions of source code must retain the above copyright notice,
## this list of conditions and the following disclaimer.
##
## 2. Redistributions in binary form must reproduce the above copyright notice,
## this list of conditions and the following disclaimer in the documentation
## and/or other materials provided with the distribution.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
## IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
## ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
## LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
## CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
## SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
## INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
## CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
## ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
## POSSIBILITY OF SUCH DAMAGE.
##
###############################################################################
import os
from pprint import pprint
from twisted.internet.defer import inlineCallbacks, returnValue
from autobahn.twisted.wamp import ApplicationSession
from autobahn.wamp.exception import ApplicationError
from autobahn.twisted.util import sleep
class AuthenticatorSession(ApplicationSession):
@inlineCallbacks
def onJoin(self, details):
@inlineCallbacks
def authenticate(realm, authid, details):
print("WAMP-Anonymous dynamic authenticator invoked: realm='{}', authid='{}'".format(realm, authid))
realm = realm or 'default'
controller = self.config.controller
worker = details['worker']
realm_id = 'realm-{}'.format(realm)
role = 'public'
# crossbar.node.corei7ub1310.worker.worker-001.is_router_realm_running
is_running = yield controller.call('{}.is_router_realm_running'.format(worker), realm_id)
if is_running:
self.log.info("Realm {realm} ALREADY RUNNING", realm=realm)
else:
self.log.info("Realm {realm} NOT RUNNING .. starting", realm=realm)
realm_config = {
"name": realm,
"roles": [
{
"name": "public",
"permissions": [
{
"uri": "",
"match": "prefix",
"allow": {
"call": True,
"register": True,
"publish": True,
"subscribe": True
},
"cache": True
}
]
}
]
}
# crossbar.node.corei7ub1310.worker.worker-001.start_router_realm
try:
yield self.config.controller.call('{}.start_router_realm'.format(worker), realm_id, realm_config)
except Exception as e:
self.log.error("REALM CREATION FAILED")
self.log.error(e)
else:
self.log.info("REALM CREATED")
role_id = 'role-{}-{}'.format(realm, role)
role_config = {
"name": role,
"permissions": [
{
"uri": "",
"match": "prefix",
"allow": {
"call": True,
"register": True,
"publish": True,
"subscribe": True
},
"cache": True
}
]
}
# crossbar.node.corei7ub1310.worker.worker-001.start_router_realm_role
try:
yield self.config.controller.call('{}.start_router_realm_role'.format(worker), realm_id, role_id, role_config)
except Exception as e:
self.log.error("ROLE CREATION FAILED")
self.log.error(e)
else:
self.log.info("ROLE CREATED")
container_id = 'backend-{}'.format(realm)
container_options = {
"pythonpath": [".."]
}
node_id = 'thinkpad-t430s'
try:
yield self.config.controller.call('crossbar.node.{}.start_container'.format(node_id), container_id, container_options)
except Exception as e:
self.log.error("CONTAINER CREATION FAILED")
self.log.error(e)
else:
self.log.info("CONTAINER CREATED")
component_id = 'backend-{}'.format(realm)
component_config = {
"type": "class",
"classname": "backend.Backend",
"realm": realm,
"transport": {
"type": "websocket",
"endpoint": {
"type": "tcp",
"host": "localhost",
"port": 8080
},
"url": "ws://localhost:8080/ws"
}
}
# crossbar.node.corei7ub1310.worker.backend-realm1.start_container_component
try:
yield self.config.controller.call('crossbar.node.{}.worker.{}.start_container_component'.format(node_id, container_id), component_id, component_config)
except Exception as e:
self.log.error("COMPONENT CREATION FAILED")
self.log.error(e)
else:
self.log.info("COMPONENT CREATED")
principal = {
'realm': realm,
'role': role,
'extra': {
'eins': 'zwo',
'drei': [4, 5, 6]
}
}
self.log.info("Authenticator finished")
returnValue(principal)
try:
yield self.register(authenticate, 'com.example.authenticate')
print("WAMP-Anonymous dynamic authenticator registered!")
except Exception as e:
print("Failed to register dynamic authenticator: {0}".format(e))
| 37.198895
| 166
| 0.526511
|
97594206dcb522d582e93b011e631dbe9b6f8f90
| 2,970
|
py
|
Python
|
fancy/config/config_loaders.py
|
susautw/fancy-config
|
25a3bd51a40df071d00327640caa05b6288bd970
|
[
"MIT"
] | 1
|
2022-03-17T04:47:52.000Z
|
2022-03-17T04:47:52.000Z
|
fancy/config/config_loaders.py
|
susautw/fancy-config
|
25a3bd51a40df071d00327640caa05b6288bd970
|
[
"MIT"
] | 9
|
2021-12-01T08:01:52.000Z
|
2022-03-16T13:05:07.000Z
|
fancy/config/config_loaders.py
|
susautw/fancy-config
|
25a3bd51a40df071d00327640caa05b6288bd970
|
[
"MIT"
] | 1
|
2022-03-07T09:29:06.000Z
|
2022-03-07T09:29:06.000Z
|
from argparse import Namespace
from pathlib import Path
from typing import Dict, TYPE_CHECKING, Union
import yaml
from abc import ABC, abstractmethod
from . import attribute_setters
if TYPE_CHECKING:
from ..config import BaseConfig
setter_name_map = {}
for name, x in vars(attribute_setters).items():
if isinstance(x, type) and issubclass(x, attribute_setters.AttributeSetter):
try:
name = x.get_setter_name()
setter_name_map[name] = x()
except NotImplementedError:
pass
class BaseConfigLoader(ABC):
_attribute_setter: attribute_setters.AttributeSetter
def __init__(self, setter: Union[attribute_setters.AttributeSetter, str] = None):
setter = "strict" if setter is None else setter
if isinstance(setter, str):
self._attribute_setter = setter_name_map[setter]
else:
self._attribute_setter = setter
@abstractmethod
def load(self, config: 'BaseConfig') -> 'BaseConfig':
...
@abstractmethod
def get_sub_loader(self, val) -> "BaseConfigLoader":
...
def get_setter(self) -> attribute_setters.AttributeSetter:
return self._attribute_setter
class DictBasedConfigLoader(BaseConfigLoader, ABC):
@abstractmethod
def get_dict(self) -> Dict:
pass
def load(self, config: 'BaseConfig'):
for key, value in self.get_dict().items():
self.get_setter().set(config, key, value)
def get_sub_loader(self, val) -> "BaseConfigLoader":
return DictConfigLoader(val, self._attribute_setter)
class PathBasedConfigLoader(BaseConfigLoader, ABC):
_path: Path
def __init__(self, path: Union[Path, str], setter: Union[attribute_setters.AttributeSetter, str] = None):
super().__init__(setter)
self._path = path if isinstance(path, Path) else Path(path)
@property
def path(self) -> Path:
return self._path
@path.setter
def path(self, path: Path) -> None:
self._path = path
class YamlConfigLoader(DictBasedConfigLoader, PathBasedConfigLoader):
def get_dict(self) -> Dict:
if not self.path.is_file():
raise FileNotFoundError(str(self.path))
stream = self.path.open()
data = yaml.safe_load(stream)
if data is None:
data = {}
stream.close()
return data
class DictConfigLoader(DictBasedConfigLoader):
_dict: Dict
def __init__(self, _dict: Dict, setter: Union[attribute_setters.AttributeSetter, str] = None):
super().__init__(setter)
self._dict = _dict
def get_dict(self) -> Dict:
return self._dict
class NamespaceConfigLoader(DictBasedConfigLoader):
_args: Namespace
def __init__(self, args: Namespace, setter: Union[attribute_setters.AttributeSetter, str] = None):
super().__init__(setter)
self._args = args
def get_dict(self) -> Dict:
return vars(self._args)
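# --- Illustrative sketch (not part of the original module) ---
# Assuming a hypothetical BaseConfig subclass `MyConfig` that declares a
# `verbose` option, the loaders apply a mapping to it like this;
# YamlConfigLoader behaves the same but reads the mapping from a file first:
#
#   cfg = MyConfig()                                      # hypothetical class
#   DictConfigLoader({'verbose': True}).load(cfg)
#   YamlConfigLoader('settings.yml').load(cfg)            # hypothetical path
#   NamespaceConfigLoader(parser.parse_args()).load(cfg)  # argparse Namespace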
| 27
| 109
| 0.66734
|
f71e2582319771768abac221ac3650e185cfa10e
| 3,198
|
py
|
Python
|
patterns.py
|
teddy-dubal/WhatsApp-Analyzer
|
1cae2f3be03843741e2b8f5e321ed7a307f3b1f1
|
[
"MIT"
] | null | null | null |
patterns.py
|
teddy-dubal/WhatsApp-Analyzer
|
1cae2f3be03843741e2b8f5e321ed7a307f3b1f1
|
[
"MIT"
] | null | null | null |
patterns.py
|
teddy-dubal/WhatsApp-Analyzer
|
1cae2f3be03843741e2b8f5e321ed7a307f3b1f1
|
[
"MIT"
] | null | null | null |
BAD_CHARS = [
u"\u202a",
u"\u200e",
u"\u202c",
u"\xa0",
]
IS_STARTING_LINE = r"""
(\[?) #Zero or one open square bracket '['
(((\d{1,2}) #1 to 2 digit day
(/|-) #'/' or '-' separator
(\d{1,2}) #1 to 2 digit month
(/|-) #'/' or '-' separator
(\d{2,4})) #2 to 4 digits of year
# ([à ]+)
(,?\s|[à ]+) #Zero or one comma ',' and a single space
((\d{1,2}) #1 to 2 digits of hour
(:|\.) #Colon ':' or dot '.' separator
(\d{2}) #2 digits of minute
(\.|:)? #Zero or one of dot '.' or colon ':'
(\d{2})? #Zero or one of 2 digits of second
(\s?[apAP]\.?[mM]\.?)?)) #Zero or one of ('space', 'A' or 'P', and 'M')
(\]?\s-?\s?\s?) #Zero or one close square bracket ']', zero or one (space and '-'), zero or one space
(.+) #One or more characters of chat member phone number or contact name
"""
IS_CHAT = r"""
([^:]+)#Chat member
(:) #Colon separator
(.+) #One or more characters of message content
"""
IS_DELETED_CHAT = [
r".*This message was deleted$",
r".*Pesan ini telah dihapus$"
]
IS_ATTACHMENT = [
r".*<Media omitted>$", # English version of android attachment
r".*<Media tidak disertakan>$", # Indonesia version of android attachment
r".*Archivo omitido*", # Spanish version of android attachment
r".*Pesan tidak didukung$", # Some device not recognize sticker attachment
# Indonesian version of android contact card,
r".+\.vcf \(file\sterlampir\)$",
    # English version of android contact card,
r".+\.vcf \(file\sattached\)$",
r".*image omitted$",
r".*video omitted$",
r".*document omitted$",
r".*Contact card omitted$",
r".*audio omitted$",
r".*GIF omitted$",
r".*sticker omitted$",
r".*imagen omitida*",
r".*audio omitido*",
r".*GIF omitido*",
r".*sticker omitido*",
r".*video omitido*",
]
IS_URL = r"(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,6}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:'\".,<>?«»“”‘’]))"
IS_EVENT = [
r"Les messages et les appels sont chiffrés de bout en bout. Aucun tiers, pas même WhatsApp, ne peut les lire ou les écouter. Appuyez pour en savoir plus.\.$", # EN
# Welcoming message
r"Messages to this group are now secured with end-to-end encryption\.$", # EN
# User created group
r".+\screated this group$", # EN
# User left group
r".+\sleft$", # EN
r".+\skeluar$", # ID
# User join group via inviation link
r".+\sjoined using this group's invite link$", # EN
r".+\stelah bergabung menggunakan tautan undangan grup ini$", # ID
# Admin adds member
r".+\sadded\s.+", # EN
r".+\smenambahkan\s.+", # ID
# Admin removes member
r".+\sremoved\s.+", # EN
# Member's security code changed
r".+'s security code changed\.$", # EN
# Member changes phone number
r".*changed their phone number to a new number. Tap to message or add the new number\.$" # EN
r".*telah mengganti nomor teleponnya ke nomor baru. Ketuk untuk mengirim pesan atau menambahkan nomor baru\.$", # ID
]
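# --- Illustrative sketch (not part of the original file) ---
# The multi-line patterns above assume compilation with re.VERBOSE; a small
# demo (the export line format below is an assumption) splitting one line
# into sender and message:
import re

if __name__ == '__main__':
    _starting_line_re = re.compile(IS_STARTING_LINE, re.VERBOSE)
    _chat_re = re.compile(IS_CHAT, re.VERBOSE)
    _m = _starting_line_re.match("12/31/20, 10:15 - Alice: Happy new year!")
    if _m:
        _chat = _chat_re.match(_m.groups()[-1])  # trailing "sender: message" part
        if _chat:
            print(_chat.group(1).strip(), '->', _chat.group(3).strip())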
| 36.758621
| 194
| 0.551907
|
0fc670920c55b19f8d6ed01310b38e6a1ed18075
| 2,547
|
py
|
Python
|
tools/keys_info.py
|
alecBeaton/dev-alec
|
e39f25c31df365b9f04378d74f84dea415586a5a
|
[
"BSD-3-Clause"
] | null | null | null |
tools/keys_info.py
|
alecBeaton/dev-alec
|
e39f25c31df365b9f04378d74f84dea415586a5a
|
[
"BSD-3-Clause"
] | null | null | null |
tools/keys_info.py
|
alecBeaton/dev-alec
|
e39f25c31df365b9f04378d74f84dea415586a5a
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
from am_defines import *
minAesKeyIdx = 8
maxAesKeyIdx = 15
minHmacKeyIdx = 8
maxHmacKeyIdx = 15
###### Following are just dummy keys - Should be substituted with real keys #######
keyTblAes = [
# Info0 Keys - Starting at index 8
0xAC, 0xCC, 0x35, 0x08, 0x5B, 0x40, 0x3E, 0x8D, 0xEA, 0x02, 0xBE, 0xDD, 0x38, 0x0C, 0x02, 0x61,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA, 0xAA,
0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55, 0x55,
0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11, 0x11,
0xA5, 0xA5, 0xA5, 0xA5, 0xA5, 0xA5, 0xA5, 0xA5, 0xA5, 0xA5, 0xA5, 0xA5, 0xA5, 0xA5, 0xA5, 0xA5,
0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66,
0xEF, 0xBE, 0xAD, 0xDE, 0xEF, 0xBE, 0xAD, 0xDE, 0xEF, 0xBE, 0xAD, 0xDE, 0xEF, 0xBE, 0xAD, 0xDE,
]
keyTblHmac = [
# Info0 Keys - Starting at index 8
0xDE, 0xF5, 0xF6, 0x2F, 0x88, 0xEB, 0xB4, 0xA2, 0xC5, 0x23, 0xC5, 0x7E, 0x21, 0x54, 0x9A, 0x98, 0x8F, 0x93, 0xB2, 0x80, 0x34, 0x07, 0xBA, 0x4D, 0xF7, 0x02, 0x5E, 0xFF, 0xF7, 0x92, 0xE6, 0xDB,
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55, 0xAA, 0x55,
0xEF, 0xBE, 0xAD, 0xDE, 0xEF, 0xBE, 0xAD, 0xDE, 0xEF, 0xBE, 0xAD, 0xDE, 0xEF, 0xBE, 0xAD, 0xDE, 0xEF, 0xBE, 0xAD, 0xDE, 0xEF, 0xBE, 0xAD, 0xDE, 0xEF, 0xBE, 0xAD, 0xDE, 0xEF, 0xBE, 0xAD, 0xDE,
]
custKey = [
0xD5, 0x10, 0xBA, 0x4F, 0xE9, 0x23, 0xAA, 0x6B, 0xC9, 0x9F, 0x29, 0xB7, 0xA7, 0xAF, 0xF7, 0x65,
]
# These are dummy values. Contact AMBIQ to get the real Recovery Key
recoveryKey = [
0xEF, 0xBE, 0xAD, 0xDE, 0xEF, 0xBE, 0xAD, 0xDE, 0xEF, 0xBE, 0xAD, 0xDE, 0xEF, 0xBE, 0xAD, 0xDE,
]
###################################################################################
wrapKey = custKey
minWrapMode = AM_SECBOOT_KEYWRAP_NONE
INFO_KEY = 0xd894e09e
FLASH_KEY = 0x12344321
| 54.191489
| 199
| 0.599136
|
055554d445b621c0433cec965b020e10d09e9d96
| 1,416
|
py
|
Python
|
python3-virtualenv/Lib/python3.6/site-packages/gunicorn/http/parser.py
|
LindaNayeli104/mlh-orientation-hackathon-project
|
d86b58f76721a9d5f3374399bfc6d3b1445d16ca
|
[
"MIT"
] | null | null | null |
python3-virtualenv/Lib/python3.6/site-packages/gunicorn/http/parser.py
|
LindaNayeli104/mlh-orientation-hackathon-project
|
d86b58f76721a9d5f3374399bfc6d3b1445d16ca
|
[
"MIT"
] | null | null | null |
python3-virtualenv/Lib/python3.6/site-packages/gunicorn/http/parser.py
|
LindaNayeli104/mlh-orientation-hackathon-project
|
d86b58f76721a9d5f3374399bfc6d3b1445d16ca
|
[
"MIT"
] | 1
|
2021-06-20T19:28:37.000Z
|
2021-06-20T19:28:37.000Z
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
from gunicorn.http.message import Request
from gunicorn.http.unreader import SocketUnreader, IterUnreader
class Parser(object):
mesg_class = None
def __init__(self, cfg, source, source_addr):
self.cfg = cfg
if hasattr(source, "recv"):
self.unreader = SocketUnreader(source)
else:
self.unreader = IterUnreader(source)
self.mesg = None
self.source_addr = source_addr
        # request counter (for keepalive connections)
self.req_count = 0
def __iter__(self):
return self
def __next__(self):
# Stop if HTTP dictates a stop.
if self.mesg and self.mesg.should_close():
raise StopIteration()
# Discard any unread body of the previous message
if self.mesg:
data = self.mesg.body.read(8192)
while data:
data = self.mesg.body.read(8192)
# Parse the next request
self.req_count += 1
self.mesg = self.mesg_class(self.cfg, self.unreader, self.source_addr, self.req_count)
if not self.mesg:
raise StopIteration()
return self.mesg
next = __next__
class RequestParser(Parser):
mesg_class = Request
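# --- Illustrative sketch (not part of the original module) ---
# A plain iterable of raw bytes can stand in for a socket (the IterUnreader
# branch in Parser.__init__ above); the request line and headers here are
# demo assumptions, not gunicorn defaults.
if __name__ == "__main__":
    from gunicorn.config import Config
    raw = iter([b"GET /ping HTTP/1.1\r\nHost: example.com\r\nConnection: close\r\n\r\n"])
    for req in RequestParser(Config(), raw, ("127.0.0.1", 0)):
        print(req.method, req.path)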
| 26.716981
| 95
| 0.601695
|
8ca6ed005846e41c51791cbbefab195bd8e757fd
| 1,038
|
py
|
Python
|
backend/src/exercise/urls.py
|
Rdani2005/helping-you2
|
772367ca6046ca578a6a951558f55fae2c974024
|
[
"MIT"
] | null | null | null |
backend/src/exercise/urls.py
|
Rdani2005/helping-you2
|
772367ca6046ca578a6a951558f55fae2c974024
|
[
"MIT"
] | null | null | null |
backend/src/exercise/urls.py
|
Rdani2005/helping-you2
|
772367ca6046ca578a6a951558f55fae2c974024
|
[
"MIT"
] | null | null | null |
# ------------------------ Libraries -------------------------
from django.urls import path
# ---------------------- Own Files -----------------------------
from . import views
# ------------------ Copyright ----------------------------------
__author__ = "Danny Sequeira"
__copyright__ = "Copyright (C) Danny Sequeira 2022"
# ------------------ URLs ------------------------------------------------
# TODO: Add all the API Urls
urlpatterns = [
    # Get the routes for the Exercises API
    path('', views.getRoutes, name="routes"),
    # Get the mental exercises API
    path('mental-excersises/', views.getMentalExercises, name="mind-exercises"),
    # Get the physical exercises API
    path('physical-excersises/', views.getPhysicalExercises, name="physical-excersises"),
    # Get a single mental exercise API
    path('mental-excersises/excercise/<str:pk>/', views.getMentalExcercise, name="mind-exercise"),
    # Get a single physical exercise API
    path('physical-excersises/excercise/<str:pk>/', views.getPhysicalExcercise, name="physical-exercise"),
]
| 49.428571
| 105
| 0.572254
|
c9f5b9e058095ceee72c5da175822fbfc40a6448
| 369
|
py
|
Python
|
hello/hello.py
|
Test01DezWebSite/KattisDemos
|
f3768c18ea8e18b6a966d5cbb0df4f158a2f44fa
|
[
"MIT"
] | 1
|
2020-04-27T20:09:06.000Z
|
2020-04-27T20:09:06.000Z
|
hello/hello.py
|
Test01DezWebSite/KattisDemos
|
f3768c18ea8e18b6a966d5cbb0df4f158a2f44fa
|
[
"MIT"
] | null | null | null |
hello/hello.py
|
Test01DezWebSite/KattisDemos
|
f3768c18ea8e18b6a966d5cbb0df4f158a2f44fa
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Kattis - hello problem
import sys
def answer():
return "Hello World!"
def solve():
print(answer())
def test():
assert answer() == "Hello World!"
print('all test cases passed...')
if __name__ == "__main__":
if len(sys.argv) > 1 and sys.argv[1] == 'test':
#print(sys.argv)
test()
else:
solve()
| 17.571429
| 51
| 0.569106
|
d1ba30ae477f2bd186d483f12e1043f6e6570b2c
| 1,137
|
py
|
Python
|
neweditor/public/bin/render.py
|
alexsigaras/SWIM
|
1a35df8acb26bdcb307a1b8f60e9feba68ed1715
|
[
"MIT"
] | 3
|
2015-06-05T00:32:44.000Z
|
2017-01-06T15:44:32.000Z
|
neweditor/public/bin/render.py
|
alexsigaras/SWIM
|
1a35df8acb26bdcb307a1b8f60e9feba68ed1715
|
[
"MIT"
] | null | null | null |
neweditor/public/bin/render.py
|
alexsigaras/SWIM
|
1a35df8acb26bdcb307a1b8f60e9feba68ed1715
|
[
"MIT"
] | null | null | null |
# # -*- coding: utf-8 -*-
# #############################################
# ## (C)opyright by Dirk Holtwick ##
# ## All rights reserved ##
# #############################################
# __version__ = "$Revision: 176 $"
# __author__ = "$Author: kgrodzicki $"
# __date__ = "$Date: 2011-01-15 10:11:47 +0100 (Fr, 15 July 2011) $"
# """
# HTML/CSS to PDF converter
# Test background image generation on the `portrait` and `landscape`
# page.
# """
# import sys, os
# sys.path.append(os.path.join("..", "include"))
# from cookbook import HTML2PDF
# if __name__ == "__main__":
# xhtml = open(sys.argv[1])
# try:
# filename = sys.argv[2]
# if not len(filename):
# raise Exception
# except:
#         filename = os.path.join(os.getcwd(), "files", sys.argv[1].split('.')[0] + '.pdf')
# #HTML2PDF(xhtml.read(), "/Users/morrishopkins/Dropbox/Spring2013/PLT/PLT-SLAMM/neweditor/public/files/file.pdf")
# os.remove(sys.argv[1])
# # def render(html_fn, filename):
# # #print html_fn
# # #xhtml = open(html_fn)
# # #print xhtml.read()
# # HTML2PDF(html_fn, filename)
| 32.485714
| 118
| 0.540897
|
1c210f910c33559272ebf90990989bf0c663ed33
| 7,256
|
py
|
Python
|
src/main/admin/db_util.py
|
bd2019us/uima-ducc
|
ffb4870d9db87b1580bd6053fc9eed622e4c8e9e
|
[
"Apache-2.0"
] | 1
|
2019-03-17T04:23:54.000Z
|
2019-03-17T04:23:54.000Z
|
src/main/admin/db_util.py
|
bd2019us/uima-ducc
|
ffb4870d9db87b1580bd6053fc9eed622e4c8e9e
|
[
"Apache-2.0"
] | null | null | null |
src/main/admin/db_util.py
|
bd2019us/uima-ducc
|
ffb4870d9db87b1580bd6053fc9eed622e4c8e9e
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# -----------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# -----------------------------------------------------------------------
#!/usr/bin/env python
import os
# common routines for ducc_post_install and db_create
def addToCp(cp, lib):
return cp + ':' + lib
def execute(CMD):
print CMD
return os.system(CMD)
# --------------------------------------------------------------------------------
# these next methods are used to parse a table returned from cqlsh into a
# - header
# - dictionary of values for each row
# parse the header into a list of names
def parse_header(header):
ret = []
parts = header.split('|')
for p in parts:
ret.append(p.strip())
return ret
# parse a single line into a dictionary with key from the header and value from the line
def parse_line(header, line):
parts = line.split('|')
ret = {}
for k, v in zip(header, parts):
ret[k] = v.strip()
return ret
# parse a set of lines returned from cqlsh into a header and a list of dictionaries, one per line
# header_id is a string we use to positively identify a header line
def parse(lines, header_id):
ret = []
header = []
for l in lines:
l = l.strip()
# print '[]', l
if ( l == '' ):
continue
if ( '---' in l ):
continue;
if ( 'rows)' in l ):
continue;
if ( header_id in l ):
header = parse_header(l)
continue
ret.append(parse_line(header, l))
return header, ret
# given a header and a collection of lines parsed by the utilities above, print a
# mostly un-ugly listing of the table results
def format(header, lines):
# calculate max column widths
hlens = {}
for k in header:
hlens[k] = len(k)
    for line in lines:
        for k in header:
            if ( not hlens.has_key(k) ):
                hlens[k] = len(line[k])
            else:
                hlens[k] = max(len(line[k]), hlens[k])
# create a format string from the widths
fmt = ''
for k in header:
fmt = fmt + ' %' + str(hlens[k]) + 's'
# first the header
print fmt % tuple(header)
# now the rows
for line in lines:
l = []
for k in header:
l.append(line[k])
print fmt % tuple(l)
return
# end of row parsing utilities
# --------------------------------------------------------------------------------
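# --- Illustrative sketch (not part of the original module, Python 2) ---
# Feeding the parsing and formatting helpers above a canned cqlsh-style table:
if __name__ == '__main__':
    _sample = [
        ' name | value ',
        '------+-------',
        ' foo  |     1 ',
        ' bar  |     2 ',
        '(2 rows)',
    ]
    _header, _rows = parse(_sample, 'name')
    format(_header, _rows)   # prints an aligned two-column listing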
def stop_database(pidfile):
print "Stopping the database."
CMD = ['kill', '-TERM', '`cat ' + pidfile + '`']
CMD = ' '.join(CMD)
execute(CMD)
def manual_config(DUCC_HOME, DUCC_HEAD):
print ''
    print 'To manually configure the database edit', DUCC_HOME + '/cassandra-server/conf/cassandra.yaml'
    print 'to ensure every occurrence of DUCC_HEAD is replaced with', DUCC_HEAD, 'and every occurrence'
    print 'of DUCC_HOME is replaced with', DUCC_HOME + '.'
    print ''
    print 'Note that one occurrence of DUCC_HEAD will be quoted: you must preserve these quotes, e.g. as "' + DUCC_HEAD + '".'
def update_cassandra_config(DUCC_HOME, DUCC_HEAD):
# Read cassandra.yaml and change the things necessary to configure it correctly
config = DUCC_HOME + '/cassandra-server/conf/cassandra.yaml'
f = open(config)
lines = []
for line in f:
if ( line.startswith('listen_address:') ):
line = line.strip();
print 'Database host is configured at', line
if ( not DUCC_HEAD in line ):
print 'Must reconfigure listen_address to', DUCC_HEAD
parts = line.strip().split(':')
old = parts[1].strip()
ch_head = "sed -i.bak s'/" + old + "/" + DUCC_HEAD + "'/ " + config
os.system(ch_head)
def configure_database(DUCC_HOME, DUCC_HEAD, java, db_autostart=True, db_host=None, db_user=None, db_pw=None, db_replication=None):
# for cassandra:
# in ducc_runtime/cassandra-server/conf we need to update cassandra.yaml to establish
# the data directories and db connection addresses
# Note this is a bootstrap routine and doesn't try to use common code that may depend on
# things being initialized correctly.
if ( db_pw == None ):
db_pw = raw_input("Enter database password OR 'bypass' to bypass database support:")
if ( db_pw == '' ):
print "Must enter a DB password or 'bypass' to continue."
return False
if ( db_pw == 'bypass' ):
print 'Database support will be bypassed'
return True
if(db_host == None):
db_host = DUCC_HEAD
db_host = db_host.split()[0]
print "database host: "+str(db_host)
if( db_autostart ):
if ( os.path.exists(DUCC_HOME + "/state/database/data") ):
print 'Database is already defined in', DUCC_HOME + '/database', '- but will try to rebuild.'
update_cassandra_config(DUCC_HOME, DUCC_HEAD)
here = os.getcwd()
os.chdir(DUCC_HOME + "/cassandra-server")
pidfile = DUCC_HOME + '/state/cassandra.pid'
consfile = DUCC_HOME + '/state/cassandra.configure.console'
print 'Starting the database. This might take a few moments if it is the first time.'
CMD = "bin/cassandra -p "+ pidfile + " > "+consfile+" 2>&1";
os.system(CMD);
print "Database is started. Waiting for initialization";
os.chdir(here)
else:
print "Database is not auto-managed.";
# Now start the db and create the schema
CLASSPATH = ''
CLASSPATH = addToCp(CLASSPATH, DUCC_HOME + '/lib/cassandra/*')
CLASSPATH = addToCp(CLASSPATH, DUCC_HOME + '/lib/guava/*')
CLASSPATH = addToCp(CLASSPATH, DUCC_HOME + '/lib/apache-log4j/*')
CLASSPATH = addToCp(CLASSPATH, DUCC_HOME + '/lib/uima-ducc/*')
CLASSPATH = addToCp(CLASSPATH, DUCC_HOME + '/apache-uima/apache-activemq/lib/*')
os.environ['CLASSPATH'] = CLASSPATH
print os.environ['CLASSPATH']
ret = True
CMD = [java, '-DDUCC_HOME=' + DUCC_HOME, 'org.apache.uima.ducc.database.DbCreate', db_host, db_user, db_pw]
if(db_replication != None):
CMD.append(db_replication)
CMD = ' '.join(CMD)
if ( execute(CMD) == 0 ):
print 'Database is initialized.'
else:
print 'Database schema could not be defined.'
ret = False
if( db_autostart ):
stop_database(pidfile)
return ret
| 34.884615
| 131
| 0.599779
|
3aba4c5a8fdaf02c3a8b96f422cf86d4cb9702b3
| 12,690
|
py
|
Python
|
sdk/core/azure-core/azure/core/rest/_helpers.py
|
lambertpan/azure-sdk-for-python
|
cc7e454fc04ec61cf021adeaac4fc0fc0a0805b2
|
[
"MIT"
] | null | null | null |
sdk/core/azure-core/azure/core/rest/_helpers.py
|
lambertpan/azure-sdk-for-python
|
cc7e454fc04ec61cf021adeaac4fc0fc0a0805b2
|
[
"MIT"
] | null | null | null |
sdk/core/azure-core/azure/core/rest/_helpers.py
|
lambertpan/azure-sdk-for-python
|
cc7e454fc04ec61cf021adeaac4fc0fc0a0805b2
|
[
"MIT"
] | null | null | null |
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# --------------------------------------------------------------------------
import codecs
import cgi
from json import dumps
try:
import collections.abc as collections
except ImportError:
import collections # type: ignore
from typing import (
Optional,
Union,
Mapping,
Sequence,
List,
Tuple,
IO,
Any,
Dict,
Iterable,
)
import xml.etree.ElementTree as ET
import six
try:
binary_type = str
from urlparse import urlparse # type: ignore
except ImportError:
binary_type = bytes # type: ignore
from urllib.parse import urlparse
from azure.core.serialization import AzureJSONEncoder
from ..utils._pipeline_transport_rest_shared import (
_format_parameters_helper,
_pad_attr_name,
_prepare_multipart_body_helper,
_serialize_request,
_format_data_helper,
)
################################### TYPES SECTION #########################
PrimitiveData = Optional[Union[str, int, float, bool]]
ParamsType = Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]]
HeadersType = Mapping[str, str]
FileContent = Union[str, bytes, IO[str], IO[bytes]]
FileType = Union[
Tuple[Optional[str], FileContent],
]
FilesType = Union[
Mapping[str, FileType],
Sequence[Tuple[str, FileType]]
]
ContentTypeBase = Union[str, bytes, Iterable[bytes]]
########################### HELPER SECTION #################################
def _verify_data_object(name, value):
if not isinstance(name, str):
raise TypeError(
"Invalid type for data name. Expected str, got {}: {}".format(
type(name), name
)
)
if value is not None and not isinstance(value, (str, bytes, int, float)):
raise TypeError(
"Invalid type for data value. Expected primitive type, got {}: {}".format(
                type(value), value
)
)
def set_urlencoded_body(data, has_files):
body = {}
default_headers = {}
for f, d in data.items():
if not d:
continue
if isinstance(d, list):
for item in d:
_verify_data_object(f, item)
else:
_verify_data_object(f, d)
body[f] = d
if not has_files:
# little hacky, but for files we don't send a content type with
# boundary so requests / aiohttp etc deal with it
default_headers["Content-Type"] = "application/x-www-form-urlencoded"
return default_headers, body
def set_multipart_body(files):
formatted_files = {
f: _format_data_helper(d) for f, d in files.items() if d is not None
}
return {}, formatted_files
def set_xml_body(content):
headers = {}
bytes_content = ET.tostring(content, encoding="utf8")
body = bytes_content.replace(b"encoding='utf8'", b"encoding='utf-8'")
if body:
headers["Content-Length"] = str(len(body))
return headers, body
def _shared_set_content_body(content):
# type: (Any) -> Tuple[HeadersType, Optional[ContentTypeBase]]
headers = {} # type: HeadersType
if isinstance(content, ET.Element):
# XML body
return set_xml_body(content)
if isinstance(content, (str, bytes)):
headers = {}
body = content
if isinstance(content, six.string_types):
headers["Content-Type"] = "text/plain"
if body:
headers["Content-Length"] = str(len(body))
return headers, body
if isinstance(content, collections.Iterable):
return {}, content
return headers, None
def set_content_body(content):
headers, body = _shared_set_content_body(content)
if body is not None:
return headers, body
raise TypeError(
"Unexpected type for 'content': '{}'. ".format(type(content)) +
"We expect 'content' to either be str, bytes, or an Iterable"
)
def set_json_body(json):
# type: (Any) -> Tuple[Dict[str, str], Any]
body = dumps(json, cls=AzureJSONEncoder)
return {
"Content-Type": "application/json",
"Content-Length": str(len(body))
}, body
def lookup_encoding(encoding):
# type: (str) -> bool
    # check that the encoding is actually known; approach taken from httpx
try:
codecs.lookup(encoding)
return True
except LookupError:
return False
def get_charset_encoding(response):
# type: (...) -> Optional[str]
content_type = response.headers.get("Content-Type")
if not content_type:
return None
_, params = cgi.parse_header(content_type)
encoding = params.get('charset') # -> utf-8
if encoding is None or not lookup_encoding(encoding):
return None
return encoding
def decode_to_text(encoding, content):
# type: (Optional[str], bytes) -> str
if not content:
return ""
if encoding == "utf-8":
encoding = "utf-8-sig"
if encoding:
return content.decode(encoding)
return codecs.getincrementaldecoder("utf-8-sig")(errors="replace").decode(content)
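# --- Illustrative sketch (not part of the original module) ---
# How the two helpers above combine; the stand-in response object is an
# assumption carrying only the header the helpers inspect.
if __name__ == "__main__":
    class _FakeResponse(object):
        headers = {"Content-Type": "text/plain; charset=iso-8859-1"}

    _enc = get_charset_encoding(_FakeResponse())  # -> "iso-8859-1"
    print(decode_to_text(_enc, b"caf\xe9"))       # prints "café"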
class HttpRequestBackcompatMixin(object):
def __getattr__(self, attr):
backcompat_attrs = [
"files",
"data",
"multipart_mixed_info",
"query",
"body",
"format_parameters",
"set_streamed_data_body",
"set_text_body",
"set_xml_body",
"set_json_body",
"set_formdata_body",
"set_bytes_body",
"set_multipart_mixed",
"prepare_multipart_body",
"serialize",
]
attr = _pad_attr_name(attr, backcompat_attrs)
return self.__getattribute__(attr)
def __setattr__(self, attr, value):
backcompat_attrs = [
"multipart_mixed_info",
"files",
"data",
"body",
]
attr = _pad_attr_name(attr, backcompat_attrs)
super(HttpRequestBackcompatMixin, self).__setattr__(attr, value)
@property
def _multipart_mixed_info(self):
"""DEPRECATED: Information used to make multipart mixed requests.
This is deprecated and will be removed in a later release.
"""
try:
return self._multipart_mixed_info_val
except AttributeError:
return None
@_multipart_mixed_info.setter
def _multipart_mixed_info(self, val):
"""DEPRECATED: Set information to make multipart mixed requests.
This is deprecated and will be removed in a later release.
"""
self._multipart_mixed_info_val = val
@property
def _query(self):
"""DEPRECATED: Query parameters passed in by user
This is deprecated and will be removed in a later release.
"""
query = urlparse(self.url).query
if query:
return {p[0]: p[-1] for p in [p.partition("=") for p in query.split("&")]}
return {}
@property
def _body(self):
"""DEPRECATED: Body of the request. You should use the `content` property instead
This is deprecated and will be removed in a later release.
"""
return self._data
@_body.setter
def _body(self, val):
"""DEPRECATED: Set the body of the request
This is deprecated and will be removed in a later release.
"""
self._data = val
def _format_parameters(self, params):
"""DEPRECATED: Format the query parameters
This is deprecated and will be removed in a later release.
You should pass the query parameters through the kwarg `params`
instead.
"""
return _format_parameters_helper(self, params)
def _set_streamed_data_body(self, data):
"""DEPRECATED: Set the streamed request body.
This is deprecated and will be removed in a later release.
You should pass your stream content through the `content` kwarg instead
"""
if not isinstance(data, binary_type) and not any(
hasattr(data, attr) for attr in ["read", "__iter__", "__aiter__"]
):
raise TypeError(
"A streamable data source must be an open file-like object or iterable."
)
headers = self._set_body(content=data)
self._files = None
self.headers.update(headers)
def _set_text_body(self, data):
"""DEPRECATED: Set the text body
This is deprecated and will be removed in a later release.
You should pass your text content through the `content` kwarg instead
"""
headers = self._set_body(content=data)
self.headers.update(headers)
self._files = None
def _set_xml_body(self, data):
"""DEPRECATED: Set the xml body.
This is deprecated and will be removed in a later release.
You should pass your xml content through the `content` kwarg instead
"""
headers = self._set_body(content=data)
self.headers.update(headers)
self._files = None
def _set_json_body(self, data):
"""DEPRECATED: Set the json request body.
This is deprecated and will be removed in a later release.
You should pass your json content through the `json` kwarg instead
"""
headers = self._set_body(json=data)
self.headers.update(headers)
self._files = None
def _set_formdata_body(self, data=None):
"""DEPRECATED: Set the formrequest body.
This is deprecated and will be removed in a later release.
You should pass your stream content through the `files` kwarg instead
"""
if data is None:
data = {}
content_type = self.headers.pop("Content-Type", None) if self.headers else None
if content_type and content_type.lower() == "application/x-www-form-urlencoded":
headers = self._set_body(data=data)
self._files = None
else: # Assume "multipart/form-data"
headers = self._set_body(files=data)
self._data = None
self.headers.update(headers)
def _set_bytes_body(self, data):
"""DEPRECATED: Set the bytes request body.
This is deprecated and will be removed in a later release.
You should pass your bytes content through the `content` kwarg instead
"""
headers = self._set_body(content=data)
# we don't want default Content-Type
# in 2.7, byte strings are still strings, so they get set with text/plain content type
headers.pop("Content-Type", None)
self.headers.update(headers)
self._files = None
def _set_multipart_mixed(self, *requests, **kwargs):
"""DEPRECATED: Set the multipart mixed info.
This is deprecated and will be removed in a later release.
"""
self.multipart_mixed_info = (
requests,
kwargs.pop("policies", []),
kwargs.pop("boundary", None),
kwargs
)
def _prepare_multipart_body(self, content_index=0):
"""DEPRECATED: Prepare your request body for multipart requests.
This is deprecated and will be removed in a later release.
"""
return _prepare_multipart_body_helper(self, content_index)
def _serialize(self):
"""DEPRECATED: Serialize this request using application/http spec.
This is deprecated and will be removed in a later release.
:rtype: bytes
"""
return _serialize_request(self)
| 33.84
| 94
| 0.626005
|
bc06f621e6014a4e589c55d92813fa023b3e35cd
| 10,489
|
py
|
Python
|
ansible/venv/lib/python2.7/site-packages/ansible/modules/remote_management/redfish/redfish_command.py
|
gvashchenkolineate/gvashchenkolineate_infra_trytravis
|
0fb18850afe0d8609693ba4b23f29c7cda17d97f
|
[
"MIT"
] | 17
|
2017-06-07T23:15:01.000Z
|
2021-08-30T14:32:36.000Z
|
ansible/venv/lib/python2.7/site-packages/ansible/modules/remote_management/redfish/redfish_command.py
|
gvashchenkolineate/gvashchenkolineate_infra_trytravis
|
0fb18850afe0d8609693ba4b23f29c7cda17d97f
|
[
"MIT"
] | 9
|
2017-06-25T03:31:52.000Z
|
2021-05-17T23:43:12.000Z
|
ansible/venv/lib/python2.7/site-packages/ansible/modules/remote_management/redfish/redfish_command.py
|
gvashchenkolineate/gvashchenkolineate_infra_trytravis
|
0fb18850afe0d8609693ba4b23f29c7cda17d97f
|
[
"MIT"
] | 3
|
2018-05-26T21:31:22.000Z
|
2019-09-28T17:00:45.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017-2018 Dell EMC Inc.
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: redfish_command
version_added: "2.7"
short_description: Manages Out-Of-Band controllers using Redfish APIs
description:
- Builds Redfish URIs locally and sends them to remote OOB controllers to
perform an action.
- Manages OOB controller ex. reboot, log management.
- Manages OOB controller users ex. add, remove, update.
- Manages system power ex. on, off, graceful and forced reboot.
options:
category:
required: true
description:
- Category to execute on OOB controller
type: str
command:
required: true
description:
- List of commands to execute on OOB controller
type: list
baseuri:
required: true
description:
- Base URI of OOB controller
type: str
username:
required: true
description:
- User for authentication with OOB controller
type: str
version_added: "2.8"
password:
required: true
description:
- Password for authentication with OOB controller
type: str
id:
required: false
description:
- ID of user to add/delete/modify
type: str
version_added: "2.8"
new_username:
required: false
description:
- name of user to add/delete/modify
type: str
version_added: "2.8"
new_password:
required: false
description:
- password of user to add/delete/modify
type: str
version_added: "2.8"
roleid:
required: false
description:
- role of user to add/delete/modify
type: str
version_added: "2.8"
bootdevice:
required: false
description:
- bootdevice when setting boot configuration
type: str
timeout:
description:
- Timeout in seconds for URL requests to OOB controller
default: 10
type: int
version_added: '2.8'
uefi_target:
required: false
description:
- UEFI target when bootdevice is "UefiTarget"
type: str
version_added: "2.9"
boot_next:
required: false
description:
- BootNext target when bootdevice is "UefiBootNext"
type: str
version_added: "2.9"
author: "Jose Delarosa (@jose-delarosa)"
'''
EXAMPLES = '''
- name: Restart system power gracefully
redfish_command:
category: Systems
command: PowerGracefulRestart
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
- name: Set one-time boot device to {{ bootdevice }}
redfish_command:
category: Systems
command: SetOneTimeBoot
bootdevice: "{{ bootdevice }}"
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
- name: Set one-time boot device to UefiTarget of "/0x31/0x33/0x01/0x01"
redfish_command:
category: Systems
command: SetOneTimeBoot
bootdevice: "UefiTarget"
uefi_target: "/0x31/0x33/0x01/0x01"
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
- name: Set one-time boot device to BootNext target of "Boot0001"
redfish_command:
category: Systems
command: SetOneTimeBoot
bootdevice: "UefiBootNext"
boot_next: "Boot0001"
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
- name: Set chassis indicator LED to blink
redfish_command:
category: Chassis
command: IndicatorLedBlink
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
- name: Add and enable user
redfish_command:
category: Accounts
command: AddUser,EnableUser
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
id: "{{ id }}"
new_username: "{{ new_username }}"
new_password: "{{ new_password }}"
roleid: "{{ roleid }}"
- name: Disable and delete user
redfish_command:
category: Accounts
command: ["DisableUser", "DeleteUser"]
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
id: "{{ id }}"
- name: Update user password
redfish_command:
category: Accounts
command: UpdateUserPassword
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
id: "{{ id }}"
new_password: "{{ new_password }}"
- name: Clear Manager Logs with a timeout of 20 seconds
redfish_command:
category: Manager
command: ClearLogs
baseuri: "{{ baseuri }}"
username: "{{ username }}"
password: "{{ password }}"
timeout: 20
'''
RETURN = '''
msg:
description: Message with action result or error description
returned: always
type: str
sample: "Action was successful"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.redfish_utils import RedfishUtils
from ansible.module_utils._text import to_native
# More will be added as module features are expanded
CATEGORY_COMMANDS_ALL = {
"Systems": ["PowerOn", "PowerForceOff", "PowerForceRestart", "PowerGracefulRestart",
"PowerGracefulShutdown", "PowerReboot", "SetOneTimeBoot"],
"Chassis": ["IndicatorLedOn", "IndicatorLedOff", "IndicatorLedBlink"],
"Accounts": ["AddUser", "EnableUser", "DeleteUser", "DisableUser",
"UpdateUserRole", "UpdateUserPassword"],
"Manager": ["GracefulRestart", "ClearLogs"],
}
def main():
result = {}
module = AnsibleModule(
argument_spec=dict(
category=dict(required=True),
command=dict(required=True, type='list'),
baseuri=dict(required=True),
username=dict(required=True),
password=dict(required=True, no_log=True),
id=dict(),
new_username=dict(),
new_password=dict(no_log=True),
roleid=dict(),
bootdevice=dict(),
timeout=dict(type='int', default=10),
uefi_target=dict(),
boot_next=dict()
),
supports_check_mode=False
)
category = module.params['category']
command_list = module.params['command']
# admin credentials used for authentication
creds = {'user': module.params['username'],
'pswd': module.params['password']}
# user to add/modify/delete
user = {'userid': module.params['id'],
'username': module.params['new_username'],
'userpswd': module.params['new_password'],
'userrole': module.params['roleid']}
# timeout
timeout = module.params['timeout']
# Build root URI
root_uri = "https://" + module.params['baseuri']
rf_utils = RedfishUtils(creds, root_uri, timeout, module)
# Check that Category is valid
if category not in CATEGORY_COMMANDS_ALL:
module.fail_json(msg=to_native("Invalid Category '%s'. Valid Categories = %s" % (category, CATEGORY_COMMANDS_ALL.keys())))
# Check that all commands are valid
for cmd in command_list:
# Fail if even one command given is invalid
if cmd not in CATEGORY_COMMANDS_ALL[category]:
module.fail_json(msg=to_native("Invalid Command '%s'. Valid Commands = %s" % (cmd, CATEGORY_COMMANDS_ALL[category])))
# Organize by Categories / Commands
if category == "Accounts":
ACCOUNTS_COMMANDS = {
"AddUser": rf_utils.add_user,
"EnableUser": rf_utils.enable_user,
"DeleteUser": rf_utils.delete_user,
"DisableUser": rf_utils.disable_user,
"UpdateUserRole": rf_utils.update_user_role,
"UpdateUserPassword": rf_utils.update_user_password
}
# execute only if we find an Account service resource
result = rf_utils._find_accountservice_resource()
if result['ret'] is False:
module.fail_json(msg=to_native(result['msg']))
for command in command_list:
result = ACCOUNTS_COMMANDS[command](user)
elif category == "Systems":
# execute only if we find a System resource
result = rf_utils._find_systems_resource()
if result['ret'] is False:
module.fail_json(msg=to_native(result['msg']))
for command in command_list:
if "Power" in command:
result = rf_utils.manage_system_power(command)
elif command == "SetOneTimeBoot":
result = rf_utils.set_one_time_boot_device(
module.params['bootdevice'],
module.params['uefi_target'],
module.params['boot_next'])
elif category == "Chassis":
result = rf_utils._find_chassis_resource()
if result['ret'] is False:
module.fail_json(msg=to_native(result['msg']))
led_commands = ["IndicatorLedOn", "IndicatorLedOff", "IndicatorLedBlink"]
# Check if more than one led_command is present
num_led_commands = sum([command in led_commands for command in command_list])
if num_led_commands > 1:
result = {'ret': False, 'msg': "Only one IndicatorLed command should be sent at a time."}
else:
for command in command_list:
if command in led_commands:
result = rf_utils.manage_indicator_led(command)
elif category == "Manager":
MANAGER_COMMANDS = {
"GracefulRestart": rf_utils.restart_manager_gracefully,
"ClearLogs": rf_utils.clear_logs
}
# execute only if we find a Manager service resource
result = rf_utils._find_managers_resource()
if result['ret'] is False:
module.fail_json(msg=to_native(result['msg']))
for command in command_list:
result = MANAGER_COMMANDS[command]()
# Return data back or fail with proper message
if result['ret'] is True:
del result['ret']
changed = result.get('changed', True)
module.exit_json(changed=changed, msg='Action was successful')
else:
module.fail_json(msg=to_native(result['msg']))
if __name__ == '__main__':
main()
| 30.85
| 130
| 0.625322
|