Dataset columns (dataset-viewer schema; nullable columns were marked with ⌀):

| column | type | range / classes |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 – 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4–209 |
| max_stars_repo_name | string | length 5–121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1–10 |
| max_stars_count | int64 (nullable) | 1–191k |
| max_stars_repo_stars_event_min_datetime | string (nullable) | length 24 |
| max_stars_repo_stars_event_max_datetime | string (nullable) | length 24 |
| max_issues_repo_path | string | length 4–209 |
| max_issues_repo_name | string | length 5–121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1–10 |
| max_issues_count | int64 (nullable) | 1–67k |
| max_issues_repo_issues_event_min_datetime | string (nullable) | length 24 |
| max_issues_repo_issues_event_max_datetime | string (nullable) | length 24 |
| max_forks_repo_path | string | length 4–209 |
| max_forks_repo_name | string | length 5–121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1–10 |
| max_forks_count | int64 (nullable) | 1–105k |
| max_forks_repo_forks_event_min_datetime | string (nullable) | length 24 |
| max_forks_repo_forks_event_max_datetime | string (nullable) | length 24 |
| content | string | length 4–1.02M |
| avg_line_length | float64 | 1.07–66.1k |
| max_line_length | int64 | 4–266k |
| alphanum_fraction | float64 | 0.01–1 |

In every record below, the path, repo name, head hexsha, and licenses are identical across the stars/issues/forks column groups, so each record's metadata is listed once.
hexsha: 8f7d7084909d6e9fff3a65e3652a09f7da33f703 | size: 442 | ext: py | lang: Python
path: x_5_5.py | repo: ofl/kuku2 | head_hexsha: 7247fb1862d917d23258ebe7a93dca5939433225 | licenses: ["MIT"]
stars: null | issues: 1 (2021-11-13T08:03:04.000Z to 2021-11-13T08:03:04.000Z) | forks: null
# x_5_5
#
# Like Momotaro (the Human class), inherit from OnitaijiMember to create three
# classes -- Dog, Monkey, Bird -- and have them print "犬は噛みつきました"
# ("the dog bit"), "さるは引っ掻きました" ("the monkey scratched"), and
# "きじはくちばしで突きました" ("the pheasant pecked with its beak").
# A sketch completing this follows the original code below.
class OnitaijiMember:
def __init__(self, name, hit_points):
self.name = name
self.hit_points = hit_points
class Human(OnitaijiMember):
def attack(self):
print(self.name + 'は刀で斬りかかりました')
members = []
members.append(Human('桃太郎', 1800))
for member in members:
member.attack()
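# A minimal sketch completing the exercise above (not part of the original
# file): the three subclasses the comment asks for, each overriding attack().
# The hit-point values below are arbitrary placeholders.
class Dog(OnitaijiMember):
    def attack(self):
        print(self.name + 'は噛みつきました')
class Monkey(OnitaijiMember):
    def attack(self):
        print(self.name + 'は引っ掻きました')
class Bird(OnitaijiMember):
    def attack(self):
        print(self.name + 'はくちばしで突きました')
for member in (Dog('犬', 900), Monkey('さる', 800), Bird('きじ', 700)):
    member.attack()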
avg_line_length: 19.217391 | max_line_length: 64 | alphanum_fraction: 0.699095

hexsha: 112291aee2513ee1b3b2a5cf43839d135df2f365 | size: 22,659 | ext: py | lang: Python
path: bbp/comps/fas.py | repo: ZhangHCFJEA/bbp | head_hexsha: 33bd999cf8d719c49f9a904872c62f02eb5850d1 | licenses: ["BSD-3-Clause"]
stars: 28 (2017-10-31T09:16:30.000Z to 2022-02-28T23:44:29.000Z) | issues: 37 (2017-05-23T15:15:35.000Z to 2022-02-05T09:13:18.000Z) | forks: 26 (2017-09-21T17:43:33.000Z to 2021-11-29T06:34:30.000Z)
#!/usr/bin/env python
"""
Copyright 2010-2017 University Of Southern California
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This module takes care of building a workflow using either user
choices interactively, or an option file containing all needed
parameters.
"""
from __future__ import division, print_function
# Import Python modules
import os
import sys
import shutil
import matplotlib as mpl
mpl.use('AGG', warn=False)
import pylab
import numpy as np
# Import Broadband modules
import bband_utils
import install_cfg
from station_list import StationList
# Import plot config file
import plot_config
def create_boore_asc2smc(control_file, input_file,
data_column, num_headers,
extension_string):
"""
This function creates the control file for the asc2smc converter tool
"""
ctl_file = open(control_file, 'w')
ctl_file.write("!Control file for ASC2SMC ! first line\n")
ctl_file.write("! Revision of program involving a change in the "
"control file on this date:\n")
ctl_file.write(" 02/02/12\n")
ctl_file.write("!Name of summary file:\n")
ctl_file.write(" asc2smc.sum\n")
ctl_file.write("!n2skip (-1=headers preceded by !; 0=no headers; "
"otherwise number of headers to skip)\n")
ctl_file.write(" %d\n" % (num_headers))
ctl_file.write("!write headers to smc file "
"(even if n2skip > 0)? (Y/N)\n")
ctl_file.write(" Y\n")
ctl_file.write("!sps (0.0 = obtain from input file)\n")
ctl_file.write(" 0\n")
ctl_file.write("!N columns to read, column number for "
"time and data columns \n")
ctl_file.write("! (for files made using blpadflt, period is in "
"column 1 and sd, pv, pa, rv, \n")
ctl_file.write("! aa are in columns 2, 3, 4, 5, 6, respectively)\n")
ctl_file.write("! Note: if sps .ne. 0.0, then column number for time "
"is ignored (but a placeholder is\n")
ctl_file.write("! still needed--e.g., 1 1 1 (read one column, which "
"contains the data; 1 20 1 would be the same)\n")
ctl_file.write("! But note: if the data are not in the first column, "
"but only the data column is to be read\n")
ctl_file.write("! (because sps will be used to establish "
"the time values),\n")
ctl_file.write("! then ncolumns must be the column corresponding to "
"the data. For example, assume that\n")
ctl_file.write("! the data are in column 3 and that columns 1 and 2 "
"contain time and some other variable, but\n")
ctl_file.write("! the time column is not to be used (perhaps because "
"accumulated error in creating the column\n")
ctl_file.write("! leads to a slight shift in the time values). "
"Then the input line should be:\n")
ctl_file.write("! 3 1 3\n")
ctl_file.write("!\n")
ctl_file.write("! This program assumes one data point per row; if "
"there are more points (as, for example,\n")
ctl_file.write("! in files with N points per line), "
"use the program wrapped2asc).\n")
ctl_file.write("!\n")
ctl_file.write(" 3 1 %d\n" % (data_column))
ctl_file.write("!Xfactr\n")
ctl_file.write(" 1.0\n")
ctl_file.write("!Read input format (used if the format is such that "
"the values are not separated by spaces,\n")
ctl_file.write("!in which case a free format cannot be "
"used for input)?\n")
ctl_file.write(" N\n")
ctl_file.write("!If yes, specify a format; if not, "
"still need a placeholder\n")
ctl_file.write(" (3e13.5)\n")
ctl_file.write("!For output, use old (standard) smc format or new\n")
ctl_file.write('!higher precision format. Specify "high" for\n')
ctl_file.write("!high precision; any other word defaults to standard\n")
ctl_file.write("!precision (but some word is needed as "
"a placeholder, even if\n")
ctl_file.write("!standard precision is desired).\n")
ctl_file.write(" high\n")
ctl_file.write("!String to append to input file name "
"for the output filename.\n")
ctl_file.write(" %s\n" % (extension_string))
ctl_file.write('!Input file name (time,data pairs; "stop" in any '
'column to quit):\n')
ctl_file.write("%s\n" % (input_file))
ctl_file.write("STOP\n")
ctl_file.close()
def create_boore_smc2fs2(control_file, input_file, name_string):
"""
This function creates the control file for the smc2fs2 FAS tool
"""
ctl_file = open(control_file, 'w')
ctl_file.write('!Control file for program SMC2FS2\n')
ctl_file.write('! Revision of program involving a change in the control '
'file on this date:\n')
ctl_file.write(' 03/10/10\n')
ctl_file.write('! As many comment lines as desired, each '
'starting with "!"\n')
ctl_file.write('! The string "pp:" indicates a new set '
'of processing parameters\n')
ctl_file.write('! to be applied to the following smc files. '
'The parameters are given on the\n')
ctl_file.write('! lines following "pp:", until the next "pp:" line '
'or until "stop" is \n')
ctl_file.write('! encountered.\n')
ctl_file.write('! NOTE: Use the tapers with caution, '
'choosing them so that important signal\n')
ctl_file.write('! is not reduced by the tapering. '
'This can be particularly a problem with \n')
ctl_file.write('! analog data from relatively small earthquakes '
'that triggered near the \n')
ctl_file.write('! S-wave arrival. \n')
ctl_file.write('!\n')
ctl_file.write('! -----------------------------------------'
'------------------------------------\n')
ctl_file.write('!\n')
ctl_file.write('! Meaning of smoothing input parameters\n')
ctl_file.write('!\n')
ctl_file.write('! NO SMOOTHING\n')
ctl_file.write('! itype = 0\n')
ctl_file.write('! SMOOTHING OVER EQUALLY SPACED FREQUENCIES\n')
ctl_file.write('! itype = 1: box weighting function\n')
ctl_file.write('! smooth_param = width of box weighting function (Hz)\n')
ctl_file.write('! itype = 2: triangular weighting function\n')
ctl_file.write('! smooth_param = width of triangular '
'weighting function (Hz)\n')
ctl_file.write('! SMOOTHING OVER LOGARITHMICALLY SPACED FREQUENCIES\n')
ctl_file.write('! itype = 3: box weighting function\n')
ctl_file.write('! smooth_param = xi, which is the fraction of '
'a decade for the\n')
ctl_file.write('! box weighting function \n')
ctl_file.write('! itype = 4: triangular weighting function\n')
ctl_file.write('! smooth_param = xi, which is the fraction of '
'a decade for the\n')
ctl_file.write('! triangular weighting function \n')
ctl_file.write('! itype = 5: Konno and Ohmachi weighting function '
'(see BSSA 88, 228-241)\n')
ctl_file.write('! smooth_param = xi, which is the fraction '
'of a decade for which\n')
ctl_file.write('! the Konno and Ohmachi weighting '
'function is greater\n')
ctl_file.write('! than 0.043.(it is related to\n')
ctl_file.write('! their smoothing parameter b '
'by the equation\n')
ctl_file.write('! b = 4.0/smooth_param, so we have '
'this correspondence between\n')
ctl_file.write('! b and smooth_param\n')
ctl_file.write('! b smooth_param \n')
ctl_file.write('! 10 0.40\n')
ctl_file.write('! 20 0.20\n')
ctl_file.write('! 40 0.10\n')
ctl_file.write('! \n')
ctl_file.write('! b = 40 seems to be commonly used, '
'but I do not think that it\n')
ctl_file.write('! gives enough smoothing; '
'I PREFER SMOOTH_PARAM = 0.2, \n')
ctl_file.write('! corresponding to b = 20. \n')
ctl_file.write('!\n')
ctl_file.write('! ipow = power of FAS to be smoothed '
'(2 = smoothing energy spectrum)\n')
ctl_file.write('!\n')
ctl_file.write('! df_smooth: Note: need df_smooth for '
'linearly-spaced smoothers, \n')
ctl_file.write('! and generally it should be the df from the fft. '
'For general x data, it is\n')
ctl_file.write('! the spacing between x values, assumed to be constant, '
'The reason for\n')
ctl_file.write('! including it as an input parameter is to "fool" the\n')
ctl_file.write('! program to do smoothing over a specified '
'number of points by\n')
ctl_file.write('! setting df_smooth = 1 and smooth_param = number '
'of points (including \n')
ctl_file.write('! points with zero weight at ends; e.g., '
'smooth_param = 5 will \n')
ctl_file.write('! give a smoother with weights 0, 1/4, 2/4, 1/4, 0; '
'smooth_param\n')
ctl_file.write('! should be odd).\n')
ctl_file.write('!\n')
ctl_file.write('! ------------------------------------'
'-----------------------------------------\n')
ctl_file.write('! Meaning of frequency specification parameters:\n')
ctl_file.write('!\n')
ctl_file.write('!SPECIFY_FREQUENCIES? (y/n):\n')
ctl_file.write('! <enter Y or N>\n')
ctl_file.write('!FREQUENCY SPECIFICATION: \n')
ctl_file.write('! If specify_frequencies = Y, then enter the \n')
ctl_file.write('! number of frequencies, freq(1), freq(2)..., '
'freq(nfreq)\n')
ctl_file.write('! If specify_frequencies = N, then enter \n')
ctl_file.write('! f_low, f_high, log-spaced (0=N, 1=Y), freq_param\n')
ctl_file.write('! if freq_param = 0.0, there is no interpolation, '
'and the FFT frequencies \n')
ctl_file.write('! are used between f_low and f_high '
'(log-spaced is ignored).\n')
ctl_file.write('! if freq_param /= 0.0 and log-spaced = 0, '
'then freq_param is the spacing of the\n')
ctl_file.write('! interpolated frequencies '
'between f_low and f_high\n')
ctl_file.write('! if freq_param /= 0.0 and log-spaced = 1, '
'then freq_param is the number of \n')
ctl_file.write('! interpolated frequencies between f_low and '
'f_high (NOTE: f_low must be > 0.0)\n')
ctl_file.write('! ---------------------------------------'
'--------------------------------------\n')
ctl_file.write('!\n')
ctl_file.write('!Name of summary file:\n')
ctl_file.write(' smc2fs2.sum\n')
ctl_file.write('PP: new set of parameters\n')
ctl_file.write('!tskip, tlength\n')
ctl_file.write(' 0.0 2000.0\n')
ctl_file.write('!dc_remove?\n')
ctl_file.write(' .true. \n')
ctl_file.write('!Length of taper at beginning and end of time series, '
'before adding zeros\n')
ctl_file.write('! to make the number of points in '
'the record a power of two.\n')
ctl_file.write(' 0.0 0.0\n')
ctl_file.write('!signnpw2(<0, backup for npw2, no zpad):\n')
ctl_file.write(' +1.0\n')
ctl_file.write('!smoothing: itype, ipow, df_smooth '
'(0 = FFT df), smooth_param\n')
ctl_file.write('! (see above for the meaning of these input parameters):\n')
ctl_file.write(' 0 1 0.0 0.20\n')
ctl_file.write('!SPECIFY_FREQUENCIES? (y/n):\n')
ctl_file.write(' N\n')
ctl_file.write('!FREQUENCY SPECIFICATION\n')
ctl_file.write(' 0.01 100.0 0 0.0 \n')
ctl_file.write('!character string to append to filename:\n')
ctl_file.write(' %s\n' % (name_string))
ctl_file.write('!Output in smc format (Y,N)?\n')
ctl_file.write('! ***IMPORTANT NOTE: Output cannot be in smc '
'format if use log-spaced \n')
ctl_file.write('! frequencies because programs such as smc2asc '
'have not been modified\n')
ctl_file.write('! to deal with log-spaced frequency.\n')
ctl_file.write(' n\n')
ctl_file.write('!Files to process:\n')
ctl_file.write('%s\n' % (input_file))
ctl_file.write('stop\n')
ctl_file.close()
def read_fas_file(fas_file):
"""
Reads FAS file and returns freq and fas arrays
"""
freqs = []
fas = []
# Read input file
input_file = open(fas_file, 'r')
# Skip headers
for line in input_file:
line = line.strip()
# skip blank lines
if not line:
continue
if line.startswith("freq"):
break
for line in input_file:
line = line.strip()
# skip blank lines
if not line:
continue
pieces = line.split()
pieces = [float(piece) for piece in pieces]
freqs.append(pieces[0])
fas.append(pieces[1])
# All done!
input_file.close()
return freqs, fas
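# Hypothetical input sketch (not part of the original module): read_fas_file()
# skips everything up to a line starting with "freq", then reads
# whitespace-separated frequency/amplitude pairs, e.g.:
#
#   freq(Hz)   FAS(cm/s)
#   1.0000E-02 1.2345E-03
#   1.2500E-02 2.3456E-03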
def plot_fas(freqs, ns_data, ew_data, eas_smoothed_data, fas_plot, station):
"""
Create a plot of both FAS components
"""
# Generate plot
# Set plot dims
pylab.gcf().set_size_inches(11, 8.5)
pylab.gcf().clf()
# Adjust title y-position
t = pylab.title("Station: %s" % (station), size=12)
pylab.plot(freqs, ns_data, 'b', lw=0.75, label="NS")
pylab.plot(freqs, ew_data, 'r', lw=0.75, label="EW")
pylab.plot(freqs, eas_smoothed_data, 'k', lw=1.25, label="Smoothed EAS")
pylab.legend(loc='upper right')
pylab.xscale('log')
pylab.yscale('log')
pylab.ylabel('Fourier Amplitude (cm/s)')
pylab.xlabel('Frequency (Hz)')
pylab.axis([0.01, 100, 0.001, 1000])
pylab.grid(True)
pylab.grid(b=True, which='major', linestyle='-', color='lightgray')
pylab.grid(b=True, which='minor', linewidth=0.5, color='gray')
# Save plot
pylab.savefig(fas_plot, format="png",
transparent=False, dpi=plot_config.dpi)
pylab.close()
def ko98_smoothing(freqs, data, delta_freq, bexp):
"""
# ** smoothing of a function y (equally-spaced, dx) with the "Konno-Ohmachi"
# ** function sin (alog10(f/fc)^exp) / alog10(f/fc)^exp) ^^4
# ** where fc is the frequency around which the smoothing is performed
# ** exp determines the exponent 10^(1/exp) is the half-width of the peak
# ** cf Konno & Ohmachi, 1998, BSSA 88-1, pp. 228-241
"""
nx = len(freqs)
data_smooth = np.zeros(nx)
fratio = np.power(10., (2.5 / bexp))
data_smooth[0] = data[0]
for index in range(1, nx):
freq = freqs[index]
# Added check to avoid division by zero later and NaNs in the output file
if freq == 0.0:
data_smooth[index] = data[index]
continue
fc1 = freq / fratio
fc2 = freq * fratio
index1 = int(fc1 / delta_freq)
index2 = int((fc2 / delta_freq) + 1)
if index1 <= 1:
index1 = 0
if index2 >= nx:
index2 = nx
a1 = 0.0
a2 = 0.0
for j in range(index1, index2):
if j != index:
# Extra check to avoid NaNs in output file
if freqs[j] == 0.0:
data_smooth[index] = data[index]
break
c1 = bexp * np.log10(freqs[j] / freq)
c1 = np.power(np.sin(c1) / c1, 4.0)
a2 = a2 + c1
a1 = a1 + c1 * data[j]
else:
a2 = a2 + 1.0
a1 = a1 + data[index]
data_smooth[index] = a1 / a2
return data_smooth
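def _example_ko98_usage():
    """Hypothetical usage sketch, not part of the original module: smooth a
    flat synthetic spectrum with a Konno-Ohmachi bandwidth of b = 20; away
    from the edges of the band the smoothed values stay close to 1.0."""
    example_freqs = [0.01 * i for i in range(1, 101)]  # 0.01 Hz to 1.0 Hz, df = 0.01
    example_data = [1.0] * len(example_freqs)          # flat amplitude spectrum
    return ko98_smoothing(example_freqs, example_data, 0.01, 20.0)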
def calculate_smoothed_eas(ns_file, ew_file, output_file=None):
"""
Calculates the smoothed EAS at the same frequencies as specified in
the input files
"""
    b_param = 188.5  # Konno-Ohmachi smoothing coefficient b (dimensionless)
# Read data
freqs, ns_data = read_fas_file(ns_file)
_, ew_data = read_fas_file(ew_file)
eas_data = []
# Calculate EAS
for ns_comp, ew_comp in zip(ns_data, ew_data):
eas_data.append(np.sqrt(0.5*(pow(ns_comp, 2) + pow(ew_comp, 2))))
# Calculate Smoothed EAS
smoothed_eas = ko98_smoothing(freqs, eas_data,
freqs[1]-freqs[0],
b_param)
# Write data file if output_file is provided
if output_file is not None:
out_file = open(output_file, 'w')
out_file.write("# Freq(Hz)\t FAS H1 (cm/s)\t FAS H2 (cm/s)\t "
"EAS (cm/s)\t Smoothed EAS, b=%f (cm/s)\n" %
(b_param))
for freq, fas_h1, fas_h2, eas, s_eas in zip(freqs, ns_data,
ew_data, eas_data,
smoothed_eas):
out_file.write("%2.7E\t%2.7E\t%2.7E\t%2.7E\t%2.7E\n" %
(freq, fas_h1, fas_h2, eas, s_eas))
out_file.close()
# All done!
return freqs, ns_data, ew_data, eas_data, smoothed_eas
class FAS(object):
"""
    Implement FAS analysis for the Broadband Platform
"""
def __init__(self, i_r_stations, sim_id=0):
"""
Initializes class variables
"""
self.sim_id = sim_id
self.r_stations = i_r_stations
def run(self):
"""
Run FAS analysis codes
"""
print("FAS Calculation".center(80, '-'))
install = install_cfg.InstallCfg.getInstance()
sim_id = self.sim_id
sta_base = os.path.basename(os.path.splitext(self.r_stations)[0])
self.log = os.path.join(install.A_OUT_LOG_DIR, str(sim_id),
"%d.fas_%s.log" % (sim_id, sta_base))
a_statfile = os.path.join(install.A_IN_DATA_DIR,
str(sim_id),
self.r_stations)
a_tmpdir = os.path.join(install.A_TMP_DATA_DIR, str(sim_id))
a_outdir = os.path.join(install.A_OUT_DATA_DIR, str(sim_id))
a_outdir_fas = os.path.join(a_outdir, "FAS")
#
# Make sure the tmp and out directories exist
#
bband_utils.mkdirs([a_tmpdir, a_outdir, a_outdir_fas], print_cmd=False)
slo = StationList(a_statfile)
site_list = slo.getStationList()
# Save current directory
old_cwd = os.getcwd()
os.chdir(a_tmpdir)
for site in site_list:
print("==> Processing station: %s" % (site.scode))
# Copy acc file to tmpdata
acc_file = "%d.%s.acc.bbp" % (sim_id, site.scode)
shutil.copy2(os.path.join(a_outdir, acc_file),
os.path.join(a_tmpdir, acc_file))
asc2smc_control_file = "asc2smc.ctl"
smc2fs2_control_file = "smc2fs2.ctl"
header_lines = bband_utils.count_header_lines(os.path.join(a_tmpdir,
acc_file))
# Work on both NS and EW components
for comp, data_column in zip(["NS", "EW"], [2, 3]):
# First we convert from BBP to SMC format
create_boore_asc2smc(os.path.join(a_tmpdir,
asc2smc_control_file),
acc_file, data_column, header_lines,
".smc8.%s" % (comp))
cmd = ("%s << END >> %s 2>&1\n" %
(os.path.join(install.A_USGS_BIN_DIR, "asc2smc"),
self.log) +
"%s\n" % (asc2smc_control_file) +
"END\n")
bband_utils.runprog(cmd, False, abort_on_error=True)
# Then, we run the smc2fs2 FAS tool
smc_file = "%s.smc8.%s" % (acc_file, comp)
create_boore_smc2fs2(os.path.join(a_tmpdir,
smc2fs2_control_file),
smc_file, ".no_smooth.fs.col")
cmd = ("%s >> %s 2>&1\n" %
(os.path.join(install.A_USGS_BIN_DIR, "smc2fs2"),
self.log))
bband_utils.runprog(cmd, False, abort_on_error=True)
# Calculate EAS and smoothed EAS
ns_file = os.path.join(a_tmpdir,
"%s.smc8.NS.no_smooth.fs.col" % (acc_file))
ew_file = os.path.join(a_tmpdir,
"%s.smc8.EW.no_smooth.fs.col" % (acc_file))
output_file = os.path.join(a_outdir_fas,
"%s.smc8.smooth.fs.col" % (acc_file))
(freqs, ns_fas,
ew_fas, eas, smoothed_eas) = calculate_smoothed_eas(ns_file,
ew_file,
output_file)
# Create plot
fas_plot = os.path.join(a_outdir_fas,
"%d.%s.fas.png" % (sim_id, site.scode))
plot_fas(freqs, ns_fas, ew_fas, smoothed_eas, fas_plot, site.scode)
# All done, restore working directory
os.chdir(old_cwd)
print("FAS Calculation Completed".center(80, '-'))
if __name__ == '__main__':
if len(sys.argv) < 3:
print("Usage: %s station_list sim_id" % (os.path.basename(sys.argv[0])))
sys.exit(1)
print("Testing Module: %s" % (os.path.basename(sys.argv[0])))
ME = FAS(sys.argv[1], sim_id=int(sys.argv[2]))
ME.run()
sys.exit(0)
avg_line_length: 43.65896 | max_line_length: 81 | alphanum_fraction: 0.558145

hexsha: 023acfacadcfad3e947090d1e547141642dd8e7d | size: 5,447 | ext: py | lang: Python
path: features/0006-awses-message-decryption-generation-generate.py | repo: seebees/aws-crypto-tools-test-vector-framework | head_hexsha: 1779b438f23cb356d5cab7ca40068dcb827b4cb1 | licenses: ["Apache-2.0"]
stars: 6 (2019-05-27T19:43:16.000Z to 2021-10-20T15:07:36.000Z) | issues: 11 (2019-04-10T18:58:23.000Z to 2020-07-09T23:43:02.000Z) | forks: 12 (2019-03-18T22:29:01.000Z to 2022-03-16T00:11:51.000Z)
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
# Only Python 3.6+ compatibility is guaranteed.
import argparse
import uuid
import json
import os
import sys
from urllib.parse import urlunparse
from awses_message_encryption_utils import (
PLAINTEXTS,
RAW_RSA_PADDING_ALGORITHMS,
ALGORITHM_SUITES,
FRAME_SIZES,
ENCRYPTION_CONTEXTS,
UNPRINTABLE_UNICODE_ENCRYPTION_CONTEXT,
_providers,
_raw_aes_providers
)
MANIFEST_VERSION = 2
TAMPERINGS = (
"truncate",
"mutate",
"half-sign",
)
def _build_tests(keys):
"""Build all tests to define in manifest, building from current rules and provided keys manifest.
:param dict keys: Parsed keys manifest
"""
for algorithm in ALGORITHM_SUITES:
for frame_size in FRAME_SIZES:
for ec in ENCRYPTION_CONTEXTS:
for provider_set in _providers(keys):
yield (
str(uuid.uuid4()),
{
"encryption-scenario": {
"plaintext": "small",
"algorithm": algorithm,
"frame-size": frame_size,
"encryption-context": ec,
"master-keys": provider_set,
}
},
)
yield (
str(uuid.uuid4()),
{
"encryption-scenario": {
"plaintext": "tiny",
"algorithm": "0178",
"frame-size": 512,
"encryption-context": UNPRINTABLE_UNICODE_ENCRYPTION_CONTEXT,
"master-keys": next(_raw_aes_providers(keys)),
},
"decryption-method": "streaming-unsigned-only"
},
)
yield (
str(uuid.uuid4()),
{
"encryption-scenario": {
"plaintext": "tiny",
"algorithm": "0378",
"frame-size": 512,
"encryption-context": UNPRINTABLE_UNICODE_ENCRYPTION_CONTEXT,
"master-keys": next(_raw_aes_providers(keys)),
},
"decryption-method": "streaming-unsigned-only",
"result": {
"error": {
"error-description": "Signed message input to streaming unsigned-only decryption method"
}
}
}
)
for tampering in TAMPERINGS:
yield (
str(uuid.uuid4()),
{
"encryption-scenario": {
"plaintext": "tiny",
"algorithm": "0478" if tampering == "half-sign" else "0578",
"frame-size": 512,
"encryption-context": UNPRINTABLE_UNICODE_ENCRYPTION_CONTEXT,
"master-keys": next(_raw_aes_providers(keys)),
},
"tampering": tampering
}
)
yield (
str(uuid.uuid4()),
{
"encryption-scenario": {
"plaintext": "tiny",
"algorithm": "0578",
"frame-size": 512,
"encryption-context": UNPRINTABLE_UNICODE_ENCRYPTION_CONTEXT,
"master-keys": next(_raw_aes_providers(keys)),
},
"tampering": {
"change-edk-provider-info": [
"arn:aws:kms:us-west-2:658956600833:alias/EncryptOnly"
]
},
"decryption-master-keys": [
{
"type": "aws-kms",
"key": "us-west-2-encrypt-only"
}
]
},
)
def build_manifest(keys_filename):
"""Build the test-case manifest which directs the behavior of cross-compatibility clients.
    :param str keys_filename: Name of file containing the keys manifest
"""
with open(keys_filename, "r") as keys_file:
keys = json.load(keys_file)
keys_path = "/".join(keys_filename.split(os.path.sep))
keys_uri = urlunparse(("file", keys_path, "", "", "", ""))
return {
"manifest": {"type": "awses-decrypt-generate", "version": MANIFEST_VERSION},
"keys": keys_uri,
"plaintexts": PLAINTEXTS,
"tests": dict(_build_tests(keys)),
}
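# Hypothetical illustration (not part of the original script): for
# keys_filename "keys.json" the computed URI is "file://keys.json"; splitting
# on os.path.sep first normalizes Windows-style separators to "/".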
def main(args=None):
"""Entry point for CLI"""
parser = argparse.ArgumentParser(
description="Build an AWS Encryption SDK decrypt message generation manifest."
)
parser.add_argument(
"--human", action="store_true", help="Print human-readable JSON"
)
parser.add_argument("--keys", required=True, help="Keys manifest to use")
parsed = parser.parse_args(args)
manifest = build_manifest(parsed.keys)
kwargs = {}
if parsed.human:
kwargs["indent"] = 4
return json.dumps(manifest, **kwargs)
if __name__ == "__main__":
sys.exit(main())
avg_line_length: 30.601124 | max_line_length: 108 | alphanum_fraction: 0.541399

hexsha: 91e15358e19b70616910be15d592b70c3a3eaf88 | size: 4,008 | ext: py | lang: Python
path: docs/examples/python/scell_test_postgres.py | repo: radetsky/themis | head_hexsha: 18ea2e39a7258e23ca9a5bb642691a9431c63d0b | licenses: ["Apache-2.0"]
stars: 1,561 (2015-05-20T05:19:29.000Z to 2022-03-31T17:32:55.000Z) | issues: 536 (2015-05-20T13:57:08.000Z to 2022-03-15T18:02:59.000Z) | forks: 141 (2015-05-20T13:22:45.000Z to 2022-03-29T01:29:40.000Z)
#
# Copyright (c) 2015 Cossack Labs Limited
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
store encrypted data in postgreSQL tables
we assume that table "scell_data" must contain data, length of which must be
not more then plain data
for store additional encryption data we will use separate table "scell_data_auth"
stored object is represented by two independent string value
"""
import base64
import sys
import psycopg2
import psycopg2.extras
from pythemis import scell
master_key = base64.b64decode(b'c2NlbGxfeG1sX2Zvcm1hdC1wcmVzZXJ2aW5nX2VuY3J5cHRpb24ucHk=')
CREATE_SCELL_DATA_TABLE_SQL = ("CREATE TABLE IF NOT EXISTS scell_data ("
"id serial PRIMARY KEY, num bytea, data bytea);")
CREATE_SCELL_DATA_AUTH_TABLE_SQL = (
"CREATE TABLE IF NOT EXISTS scell_data_auth ("
"id serial PRIMARY KEY, num bytea, data bytea);")
def init_table(connection):
with connection.cursor() as cursor:
cursor.execute(CREATE_SCELL_DATA_TABLE_SQL)
cursor.execute(CREATE_SCELL_DATA_AUTH_TABLE_SQL)
connection.commit()
def add_record(connection, field1, field2):
encryptor = scell.SCellTokenProtect(master_key)
# encrypt field1
encrypted_field1, field1_auth_data = encryptor.encrypt(
field1.encode('utf-8'))
# encrypt field2
encrypted_field2, field2_auth_data = encryptor.encrypt(
field2.encode('utf8'))
with connection.cursor() as cursor:
# store main cryptomessage
cursor.execute(
"INSERT INTO scell_data (num, data) VALUES (%s, %s) RETURNING ID",
(psycopg2.Binary(encrypted_field1),
psycopg2.Binary(encrypted_field2)))
new_id_value = cursor.fetchone()[0]
# store additional auth values
cursor.execute(
"INSERT INTO scell_data_auth (id, num, data) VALUES (%s, %s, %s)",
(new_id_value,
psycopg2.Binary(field1_auth_data),
psycopg2.Binary(field2_auth_data)))
connection.commit()
return new_id_value
def get_record(connection, id):
# retrieve record from db by id
dec = scell.SCellTokenProtect(master_key)
with connection.cursor() as cursor:
cursor.execute(
"SELECT * FROM scell_data "
"INNER JOIN scell_data_auth ON "
"scell_data.id = %s AND scell_data.id=scell_data_auth.id;",
[id]
)
row = cursor.fetchone()
_, DATA_NUM, DATA_DATA, AUTH_ID, AUTH_NUM, AUTH_DATA = range(6)
if (sys.version_info > (3, 0)):
print("stored data:",
row[DATA_NUM].tobytes(), row[AUTH_NUM].tobytes(),
row[DATA_DATA].tobytes(), row[AUTH_DATA].tobytes())
else:
print("stored data:",
bytes(row[DATA_NUM]), bytes(row[AUTH_NUM]),
bytes(row[DATA_DATA]), bytes(row[AUTH_DATA]))
num = dec.decrypt(bytes(row[DATA_NUM]),
bytes(row[AUTH_NUM])).decode('utf-8')
data = dec.decrypt(bytes(row[DATA_DATA]),
bytes(row[AUTH_DATA])).decode('utf-8')
return num, data
if __name__ == '__main__':
dsn = ("dbname=scell_token_protect_test user=postgres password=postgres "
"host=localhost")
with psycopg2.connect(dsn) as connection:
init_table(connection)
row_id = add_record(connection, "First record", "Second record")
record = get_record(connection, row_id)
print("real_data: ", record)
avg_line_length: 36.436364 | max_line_length: 90 | alphanum_fraction: 0.662924

hexsha: 25b121d091c4cc802e2826591fee1af978b68e66 | size: 859 | ext: py | lang: Python
path: lasttester/contrib/base/cache_shelve.py | repo: gitdachong/lasttester | head_hexsha: 4c637d7704e9d577050e666f6ce01fc5d3752044 | licenses: ["MIT"]
stars: null | issues: null | forks: null
# coding: utf-8
import shelve
import os
from ..utils import util_path
class CacheShelve:
    def __init__(self, filename=''):
        self.file_name = 'lasttester_temp_shelve_cache'
        if not filename:
            import tempfile
            filename = os.path.join(tempfile.gettempdir(), self.file_name)
        if util_path.init_path(filename):
            self._db = shelve.open(filename)
        else:
            self._db = shelve.open(self.file_name)
    def set(self, key, value):
        self._db[key] = value
        return self
    def delete(self, key):
        if key in self._db:
            del self._db[key]
        return True
    def get(self, key, default):
        try:
            val = self._db.get(key, default)
        except Exception:
            val = default
        return val
    def __del__(self):
        self._db.close()
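# Hypothetical usage sketch (not part of the original module; because of the
# relative import above, this runs from client code that imports CacheShelve):
#
#     cache = CacheShelve()
#     cache.set('token', 'abc123').set('retries', 3)  # set() returns self
#     print(cache.get('token', None))                 # -> 'abc123'
#     cache.delete('token')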
avg_line_length: 24.542857 | max_line_length: 73 | alphanum_fraction: 0.575087

hexsha: ced52f884db31f6eb51e14bf8486482c17d2333d | size: 2,499 | ext: py | lang: Python
path: My_Model.py | repo: block98k/MyKerasModel | head_hexsha: 9db513714d93b667a91ed73ed7bf0de8dc18de30 | licenses: ["Apache-2.0"]
stars: null | issues: null | forks: null
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from keras.layers import Input,Lambda
from keras import backend as K
from keras import metrics
from keras import optimizers
from keras.models import Model,Sequential
from keras.layers.advanced_activations import LeakyReLU
from keras.layers import Dense,Activation,Flatten
from keras.layers import Conv2D, MaxPooling2D,UpSampling2D,Add
from keras.layers import Conv2DTranspose,Reshape,concatenate
from keras.layers.normalization import BatchNormalization
from keras import losses
from keras.optimizers import Adam
from My_Block import *
def create_encoder(input_size,latent_dim):
net_input = Input(shape=(input_size,input_size,3))
X = Conv_Leaky(net_input,16)
X = Conv_Leaky(X,16)
X = MaxPooling2D((2, 2))(X)
filters=32
for i in range(6):
X=kk_block(X,filters)
filters*=2
X = Conv_Leaky(X,latent_dim)
return Model(net_input,X)
def create_decoder(latent_dim,middle_dim,activations='sigmoid'):
Z_input = Input(shape=(1,1,latent_dim))
X = Conv2DTranspose(middle_dim,(3, 3),padding='valid',activation='relu')(Z_input)
X = BatchNormalization(momentum=0.8)(X)
filters=1024
for i in range(6):
X=kk_block_T(X,filters)
filters//=2
X = Conv_Leaky(X,16)
X = UpSampling2D()(X)
X = Conv_Leaky(X,16)
X = Conv_Leaky(X,16)
X = Conv2D(3,(3, 3))(X)
X = BatchNormalization()(X)
out = Activation(activations)(X)
return Model(Z_input,out)
def create_discriminator(input_size,activations='sigmoid'):
net_input = Input(shape=(input_size,input_size,3))
X = Conv_Leaky(net_input,32)
X = Conv_Leaky(X,32)
X = MaxPooling2D((2, 2))(X)
filters=64
for i in range(5):
X=kk_block(X,filters)
filters*=2
X = Conv_Leaky(X,128)
X = Flatten()(X)
validity=Dense(1, activation=activations)(X)
return Model(net_input,validity)
def create_mnist():
inputs = Input(shape=(28,28,1))
x = Conv_Leaky(inputs,32,padding='valid')
x = Conv_Leaky(x,64,padding='valid')
x = Res(x,64)
x = Res(x,64)
x = MaxPooling2D()(x)
x = Conv_Leaky(x,128,padding='valid')
x = Conv_Leaky(x,128,padding='valid')
x = Conv_Leaky(x,256,padding='valid')
x = Conv_Leaky(x,256,padding='valid')
x = Flatten()(x)
out = Dense(10,activation='softmax')(x)
return Model(inputs,out)
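# Hypothetical usage sketch (not part of the original file; assumes My_Block
# provides the Conv_Leaky, kk_block, kk_block_T and Res helpers imported above):
#
#     encoder = create_encoder(input_size=128, latent_dim=256)
#     decoder = create_decoder(latent_dim=256, middle_dim=1024)
#     classifier = create_mnist()
#     classifier.summary()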
avg_line_length: 33.77027 | max_line_length: 86 | alphanum_fraction: 0.673469

hexsha: ef091809adbe283a1d9e59dd4e3b503656f90b58 | size: 1,097 | ext: py | lang: Python
path: v1/projects/serializers/project.py | repo: buckyroberts/Website-API | head_hexsha: e74d202a41533c7622acbe12c793d047d44012ad | licenses: ["MIT"]
stars: 64 (2020-10-02T02:58:06.000Z to 2022-01-29T20:00:50.000Z) | issues: 93 (2020-10-04T22:53:46.000Z to 2022-03-05T18:17:46.000Z) | forks: 21 (2020-10-11T14:16:13.000Z to 2021-11-09T17:50:25.000Z)
from rest_framework import serializers
from ..models.project import Project
from ..serializers.milestone import MilestoneSerializer
class ProjectSerializer(serializers.ModelSerializer):
milestones = serializers.SerializerMethodField()
project_lead_display_name = serializers.CharField(source='project_lead.user.display_name', required=False)
class Meta:
fields = (
'benefits',
'centered_around_tnb',
'created_date',
'description',
'estimated_completion_date',
'github_url',
'logo',
'milestones',
'modified_date',
'overview',
'pk',
'problem',
'project_lead',
'project_lead_display_name',
'target_market',
'title',
'is_featured'
)
model = Project
read_only_fields = 'created_date', 'modified_date', 'project_lead_display_name'
def get_milestones(self, project):
return MilestoneSerializer(project.milestone_set.all(), many=True).data
avg_line_length: 30.472222 | max_line_length: 110 | alphanum_fraction: 0.61258

hexsha: fe310e08a5d7f118323661753954675ecb8a8c7b | size: 212 | ext: py | lang: Python
path: CoV19/bots/urls.py | repo: just-ary27/CovBot-revamp | head_hexsha: 31af847237c4c5e7d5086a78950d06ecfd81318f | licenses: ["MIT"]
stars: 1 (2021-05-12T18:44:30.000Z to 2021-05-12T18:44:30.000Z) | issues: 2 (2021-09-22T18:41:37.000Z to 2022-02-10T09:28:52.000Z) | forks: null
from django.urls import path,include
from . import views
urlpatterns = [
path('',views.bots),
path('features',views.features),
path('commands',views.commands),
path('tutorial',views.tutorial),
]
avg_line_length: 21.2 | max_line_length: 36 | alphanum_fraction: 0.683962

hexsha: 8f831af90751c0fa63f819d5d22f631555638324 | size: 2,336 | ext: py | lang: Python
path: WebCrawler/recaptcha.py | repo: psilva-leo/WebCrawler | head_hexsha: a29baff6470f953f064fd274e847ac94385f72d3 | licenses: ["MIT"]
stars: null | issues: null | forks: null
import scrapy
from twisted.internet.defer import inlineCallbacks
from selenium import webdriver
import os
import time
from random import randint
import urllib.request
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
class RecaptchaEngine(object):
CAPTCHA_XPATH = '//iframe[contains(@src, "recaptcha")]/@src'
def __init__(self, crawler):
self.crawler = crawler
def wait_between_random(self, min, max):
time.sleep(randint(min, max))
def download_audio(self, url):
urllib.request.urlretrieve(url, "audio.mp3")
def has_captcha(self, response):
sel = scrapy.Selector(response)
return len(sel.xpath(self.CAPTCHA_XPATH)) > 0
def solve_captcha(self, response):
sel = scrapy.Selector(response)
curfilePath = os.path.abspath(__file__)
curDir = os.path.abspath(
os.path.join(curfilePath, os.pardir))
parentDir = os.path.abspath(os.path.join(curDir, os.pardir))
drive_path = os.path.join(parentDir, "geckodriver")
driver = webdriver.Firefox(executable_path=drive_path)
driver.get(response.url)
recaptcha_iframe = driver.find_elements_by_tag_name("iframe")[0]
recaptcha_challenge_iframe = driver.find_elements_by_tag_name("iframe")[1]
recaptcha_url = recaptcha_iframe.get_attribute('src')
btnSubmit = driver.find_element_by_id('recaptcha-demo-submit')
driver.switch_to.frame(recaptcha_iframe)
btnCheck = driver.find_element_by_id('recaptcha-anchor')
btnCheck.click()
self.wait_between_random(3,9)
driver.switch_to.default_content()
try:
driver.switch_to.frame(recaptcha_challenge_iframe)
recaptcha_audio_button = driver.find_element_by_id('recaptcha-audio-button')
recaptcha_audio_button.click()
recaptcha_audio = driver.find_element_by_id('audio-source')
audio_url = recaptcha_audio.get_attribute('src')
self.download_audio(audio_url)
# TODO: Solve using speech recognition
print('Recaptcha solved!')
        except Exception:
            # the audio-challenge elements were not present, so there is
            # nothing to solve
            print("No recaptcha challenge!")
btnSubmit.click()
driver.quit()
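# Hypothetical usage sketch (not part of the original module): wire the engine
# into a crawler callback and only attempt solving when a captcha is present:
#
#     engine = RecaptchaEngine(crawler)
#     if engine.has_captcha(response):
#         engine.solve_captcha(response)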
avg_line_length: 33.371429 | max_line_length: 88 | alphanum_fraction: 0.690068

hexsha: ed6424ea57b32a1661e0f57ca0bda6a26b30c740 | size: 3,433 | ext: py | lang: Python
path: day9.py | repo: GeirOwe/adventOfCode | head_hexsha: fee1420cb8ecce8b7aaf9d48472364be191ca2a2 | licenses: ["MIT"]
stars: 1 (2021-12-20T11:10:59.000Z to 2021-12-20T11:10:59.000Z) | issues: null | forks: 1 (2021-12-02T14:40:12.000Z to 2021-12-02T14:40:12.000Z)
# advent of code
# response to the challenge by geir owe
# day9 - challenge: https://adventofcode.com/2020/day/9
#start function
def get_all_port_data(port_file):
codeList = []
#move all instructions in the boot file into a list
for code in port_file:
code = int(code.strip())
codeList.append(code)
return codeList
#end get_all_port_data function
#start function
def check_the_number(codeList, startPos, endPos, theNumber):
inList = False
checkNext = True
thisPos = startPos
nextPos = thisPos + 1
#check if the number is the sum of to elements in list
while checkNext:
calcResult = codeList[thisPos] + codeList[nextPos]
#if the number is the sum of the two elements, then we are done
if calcResult == theNumber:
inList = True
checkNext = False
else:
#keep this position fixed and loop thru the rest of the list
nextPos = nextPos + 1
moveToNextItem = True
while moveToNextItem:
calcResult = codeList[thisPos] + codeList[nextPos]
#if the number is the sum of the two elements, then we are done
if calcResult == theNumber:
inList = True
checkNext = False
moveToNextItem = False
else:
nextPos = nextPos + 1
                    # when at the end of the list, start from the top again by
                    # advancing the fixed item; if the fixed item is already at
                    # the end, we are done
if nextPos > endPos:
thisPos = thisPos + 1
nextPos = thisPos + 1
moveToNextItem = False
if thisPos == endPos:
checkNext = False
return inList
#end check_the_number function
#start function
def find_the_number(codeList, preambleLength):
#the part of the codelist to check
startPos = 0
endPos = preambleLength - 1
thePosToCheck = endPos + 1
itemsInList = len(codeList)
theNumber = codeList[thePosToCheck]
moreItems = True
inList = check_the_number(codeList, startPos, endPos, theNumber)
while (inList) & (moreItems):
        # move to the next number and check whether two of the
        # numbers before it sum to that number
startPos = startPos + 1
endPos = endPos + 1
thePosToCheck = endPos + 1
theNumber = codeList[thePosToCheck]
inList = check_the_number(codeList, startPos, endPos, theNumber)
#check if we are at the end - i.e. the item is not in the list
if endPos == (itemsInList - 1):
moreItems = False
return theNumber
#end function
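# Hypothetical self-check (not part of the original file), using the example
# list from the day 9 puzzle text with a preamble length of 5; the first number
# that is not the sum of two of the five numbers before it should be 127.
_example_codes = [35, 20, 15, 25, 47, 40, 62, 55, 65, 95, 102, 117, 150, 182,
                  127, 219, 299, 277, 309, 576]
assert find_the_number(_example_codes, 5) == 127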
#read the test puzzle input
#port_file = open('day9_test_puzzle_input.txt', 'r')
#thePreamble = 5
#read the puzzle input
port_file = open('day9_puzzle_input.txt', 'r')
thePreamble = 25
#move all codes into a list
codeList = get_all_port_data(port_file)
#the challenge:
# find the first number in the list (after the preamble) which is not
# the sum of two of the 25 numbers before it.
# What is the first number that does not have this property?
theNumber = 0
theNumber = find_the_number(codeList, thePreamble)
print('the first number in the list which is not the sum of two of the x numbers before it is: ', theNumber)
avg_line_length: 33.990099 | max_line_length: 108 | alphanum_fraction: 0.617827

hexsha: 653558b6f7edef4c296743980b0b333e67fd63db | size: 490 | ext: py | lang: Python
path: templateSolution.py | repo: DmitryNaimark/leetcode-solutions-python | head_hexsha: 16af5f3a9cb8469d82b14c8953847f0e93a92324 | licenses: ["MIT"]
stars: 1 (2019-10-31T11:06:23.000Z to 2019-10-31T11:06:23.000Z) | issues: null | forks: null
# LeetCode Problem URL
# ---------------------------------------------------
# Runtime Complexity: O(?)
# Space Complexity: O(?)
# Idea:
# Solution
# ---------------------------------------------------
# Uses DN functions:
# ---------------------------------------------------
# ---------------------------------------------------
# Test Cases
# ---------------------------------------------------
solution = Solution()
print(solution.some_function)
avg_line_length: 23.333333 | max_line_length: 53 | alphanum_fraction: 0.265306

hexsha: 146a17f784fb5239679806642ea5947ca36d8944 | size: 24,271 | ext: py | lang: Python
path: Packs/Elasticsearch/Integrations/Elasticsearch_v2/Elasticsearch_v2.py | repo: mchasepan/content | head_hexsha: 177c7fe86c4872141107f48075c6578daffc4bd4 | licenses: ["MIT"]
stars: null | issues: 2 (2019-09-18T08:11:22.000Z to 2020-11-24T18:50:28.000Z) | forks: 2 (2020-10-11T18:01:32.000Z to 2020-10-14T03:21:23.000Z)
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
'''IMPORTS'''
from typing import List
from elasticsearch import Elasticsearch, RequestsHttpConnection, NotFoundError
from elasticsearch_dsl import Search
from elasticsearch_dsl.query import QueryString
from datetime import datetime
import json
import requests
import warnings
from dateutil.parser import parse
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
warnings.filterwarnings(action="ignore", message='.*using SSL with verify_certs=False is insecure.')
API_KEY_PREFIX = '_api_key_id:'
SERVER = demisto.params().get('url', '').rstrip('/')
USERNAME = demisto.params().get('credentials', {}).get('identifier')
PASSWORD = demisto.params().get('credentials', {}).get('password')
API_KEY_ID = USERNAME[len(API_KEY_PREFIX):] if USERNAME and USERNAME.startswith(API_KEY_PREFIX) else None
if API_KEY_ID:
USERNAME = None
API_KEY = (API_KEY_ID, PASSWORD)
PROXY = demisto.params().get('proxy')
HTTP_ERRORS = {
400: '400 Bad Request - Incorrect or invalid parameters',
401: '401 Unauthorized - Incorrect or invalid username or password',
403: '403 Forbidden - The account does not support performing this task',
404: '404 Not Found - Elasticsearch server was not found',
408: '408 Timeout - Check port number or Elasticsearch server credentials',
410: '410 Gone - Elasticsearch server no longer exists in the service',
500: '500 Internal Server Error - Internal error',
503: '503 Service Unavailable'
}
'''VARIABLES FOR FETCH INCIDENTS'''
TIME_FIELD = demisto.params().get('fetch_time_field', '')
FETCH_INDEX = demisto.params().get('fetch_index', '')
FETCH_QUERY = demisto.params().get('fetch_query', '')
FETCH_TIME = demisto.params().get('fetch_time', '3 days')
FETCH_SIZE = int(demisto.params().get('fetch_size', 50))
INSECURE = not demisto.params().get('insecure', False)
TIME_METHOD = demisto.params().get('time_method', 'Simple-Date')
def get_timestamp_first_fetch(last_fetch):
"""Gets the last fetch time as a datetime and converts it to the relevant timestamp format.
Args:
last_fetch(datetime): A datetime object setting up the last fetch time
Returns:
(num).The formatted timestamp
"""
    # this theoretically shouldn't happen, but just in case
if str(last_fetch).isdigit():
return int(last_fetch)
if TIME_METHOD == 'Timestamp-Seconds':
return int(last_fetch.timestamp())
elif TIME_METHOD == 'Timestamp-Milliseconds':
return int(last_fetch.timestamp() * 1000)
def timestamp_to_date(timestamp_string):
"""Converts a timestamp string to a datetime object.
Args:
timestamp_string(string): A string with a timestamp in it.
Returns:
(datetime).represented by the timestamp in the format '%Y-%m-%d %H:%M:%S.%f'
"""
# find timestamp in form of more than seconds since epoch: 1572164838000
if TIME_METHOD == 'Timestamp-Milliseconds':
timestamp_number = float(int(timestamp_string) / 1000)
# find timestamp in form of seconds since epoch: 1572164838
elif TIME_METHOD == 'Timestamp-Seconds':
timestamp_number = float(timestamp_string)
# convert timestamp (a floating point number representing time since epoch) to datetime
return datetime.utcfromtimestamp(timestamp_number)
def get_api_key_header_val(api_key):
"""
Check the type of the passed api_key and return the correct header value
for the `API Key authentication
<https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html>`
:arg api_key, either a tuple or a base64 encoded string
"""
if isinstance(api_key, (tuple, list)):
s = "{0}:{1}".format(api_key[0], api_key[1]).encode('utf-8')
return "ApiKey " + base64.b64encode(s).decode('utf-8')
return "ApiKey " + api_key
def elasticsearch_builder():
"""Builds an Elasticsearch obj with the necessary credentials, proxy settings and secure connection."""
proxies = handle_proxy() if PROXY else None
if API_KEY_ID:
es = Elasticsearch(hosts=[SERVER], connection_class=RequestsHttpConnection, verify_certs=INSECURE,
api_key=API_KEY, proxies=proxies)
# this should be passed as api_key via Elasticsearch init, but this code ensures it'll be set correctly
if hasattr(es, 'transport'):
es.transport.get_connection().session.headers['authorization'] = get_api_key_header_val(API_KEY)
return es
if USERNAME:
return Elasticsearch(hosts=[SERVER], connection_class=RequestsHttpConnection, verify_certs=INSECURE,
http_auth=(USERNAME, PASSWORD), proxies=proxies)
else:
return Elasticsearch(hosts=[SERVER], connection_class=RequestsHttpConnection, verify_certs=INSECURE,
proxies=proxies)
def get_hit_table(hit):
"""Create context for a single hit in the search.
Args:
hit(Dict): a dictionary representing a single hit in the search.
Returns:
(dict).The hit context.
(list).the headers of the hit.
"""
table_context = {
'_index': hit.get('_index'),
'_id': hit.get('_id'),
'_type': hit.get('_type'),
'_score': hit.get('_score'),
}
headers = ['_index', '_id', '_type', '_score']
if hit.get('_source') is not None:
for source_field in hit.get('_source').keys():
table_context[str(source_field)] = hit.get('_source').get(str(source_field))
headers.append(source_field)
return table_context, headers
def results_to_context(index, query, base_page, size, total_dict, response):
"""Creates context for the full results of a search.
Args:
index(str): the index in which the search was made.
query(str): the query of the search.
base_page(int): the base page from which the search is made.
size(int): the amount of results to return.
        total_dict(dict): a dictionary containing the info about the number of total results found
response(Dict): the raw response of the results.
Returns:
(dict).The full context for the search results.
(list).The metadata headers of the search.
(list).the context for the hits.
(list).the headers of the hits.
"""
search_context = {
'Server': SERVER,
'Index': index,
'Query': query,
'Page': base_page,
'Size': size,
'total': total_dict,
'max_score': response.get('hits').get('max_score'),
'took': response.get('took'),
'timed_out': response.get('timed_out')
}
hit_headers = [] # type: List
hit_tables = []
if total_dict.get('value') > 0:
for hit in response.get('hits').get('hits'):
single_hit_table, single_header = get_hit_table(hit)
hit_tables.append(single_hit_table)
hit_headers = list(set(single_header + hit_headers) - {'_id', '_type', '_index', '_score'})
hit_headers = ['_id', '_index', '_type', '_score'] + hit_headers
search_context['Results'] = response.get('hits').get('hits')
meta_headers = ['Query', 'took', 'timed_out', 'total', 'max_score', 'Server', 'Page', 'Size']
return search_context, meta_headers, hit_tables, hit_headers
def get_total_results(response_dict):
"""Creates a dictionary with all for the number of total results found
Args:
response_dict(dict): the raw response from elastic search.
Returns:
(dict).The total results info for the context.
(num).The number of total results.
"""
total_results = response_dict.get('hits', {}).get('total')
if not str(total_results).isdigit():
# if in version 7 - total number of hits has value field
total_results = total_results.get('value')
total_dict = response_dict.get('hits').get('total')
else:
total_dict = {
'value': total_results,
}
return total_dict, total_results
def search_command():
"""Performs a search in Elasticsearch."""
index = demisto.args().get('index')
query = demisto.args().get('query')
fields = demisto.args().get('fields') # fields to display
explain = 'true' == demisto.args().get('explain')
base_page = int(demisto.args().get('page'))
size = int(demisto.args().get('size'))
sort_field = demisto.args().get('sort-field')
sort_order = demisto.args().get('sort-order')
es = elasticsearch_builder()
que = QueryString(query=query)
search = Search(using=es, index=index).query(que)[base_page:base_page + size]
if explain:
# if 'explain parameter is set to 'true' - adds explanation section to search results
search = search.extra(explain=True)
if fields is not None:
fields = fields.split(',')
search = search.source(fields)
if sort_field is not None:
search = search.sort({sort_field: {'order': sort_order}})
response = search.execute().to_dict()
total_dict, total_results = get_total_results(response)
search_context, meta_headers, hit_tables, hit_headers = results_to_context(index, query, base_page,
size, total_dict, response)
search_human_readable = tableToMarkdown('Search Metadata:', search_context, meta_headers, removeNull=True)
hits_human_readable = tableToMarkdown('Hits:', hit_tables, hit_headers, removeNull=True)
total_human_readable = search_human_readable + '\n' + hits_human_readable
full_context = {
'Elasticsearch.Search(val.Query == obj.Query && val.Index == obj.Index '
'&& val.Server == obj.Server && val.Page == obj.Page && val.Size == obj.Size)': search_context
}
return_outputs(total_human_readable, full_context, response)
def fetch_params_check():
"""If is_fetch is ticked, this function checks that all the necessary parameters for the fetch are entered."""
str_error = [] # type:List
if TIME_FIELD == '' or TIME_FIELD is None:
str_error.append("Index time field is not configured.")
if FETCH_INDEX == '' or FETCH_INDEX is None:
str_error.append("Index is not configured.")
if FETCH_QUERY == '' or FETCH_QUERY is None:
str_error.append("Query by which to fetch incidents is not configured.")
if len(str_error) > 0:
return_error("Got the following errors in test:\nFetches incidents is enabled.\n" + '\n'.join(str_error))
def test_general_query(es):
"""Test executing query in fetch index.
Notes:
        if is_fetch is ticked, this function runs a general query to Elasticsearch just to make sure we get a response
from the FETCH_INDEX.
Args:
es(Elasticsearch): an Elasticsearch object to which we run the test.
"""
try:
query = QueryString(query='*')
search = Search(using=es, index=FETCH_INDEX).query(query)[0:1]
response = search.execute().to_dict()
_, total_results = get_total_results(response)
except NotFoundError as e:
return_error("Fetch incidents test failed.\nError message: {}.".format(str(e).split(',')[2][2:-1]))
def test_time_field_query(es):
"""Test executing query of fetch time field.
Notes:
if is_fetch is ticked, this function checks if the entered TIME_FIELD returns results.
Args:
es(Elasticsearch): an Elasticsearch object to which we run the test.
Returns:
(dict).The results of the query if they are returned.
"""
query = QueryString(query=TIME_FIELD + ':*')
search = Search(using=es, index=FETCH_INDEX).query(query)[0:1]
response = search.execute().to_dict()
_, total_results = get_total_results(response)
if total_results == 0:
# failed in getting the TIME_FIELD
return_error("Fetch incidents test failed.\nDate field value incorrect [{}].".format(TIME_FIELD))
else:
return response
def test_fetch_query(es):
"""Test executing fetch query.
Notes:
if is_fetch is ticked, this function checks if the FETCH_QUERY returns results.
Args:
es(Elasticsearch): an Elasticsearch object to which we run the test.
Returns:
(dict).The results of the query if they are returned.
"""
query = QueryString(query=str(TIME_FIELD) + ":* AND " + FETCH_QUERY)
search = Search(using=es, index=FETCH_INDEX).query(query)[0:1]
response = search.execute().to_dict()
_, total_results = get_total_results(response)
if total_results > 0:
return response
else:
# failed to get the TIME_FIELD with the FETCH_QUERY
# this can happen and not be an error if the FETCH_QUERY doesn't have results yet.
# Thus this does not return an error message
return None
def test_timestamp_format(timestamp):
"""if is_fetch is ticked and the TIME_METHOD chosen is a type of timestamp - this function checks that
the timestamp is in the correct format.
Args:
        timestamp(string): a timestamp string.
"""
timestamp_in_seconds_len = len(str(int(time.time())))
if TIME_METHOD == 'Timestamp-Seconds':
if not timestamp.isdigit():
return_error(f"The time field does not contain a standard timestamp.\nFetched: {timestamp}")
elif len(timestamp) > timestamp_in_seconds_len:
return_error(f"Fetched timestamp is not in seconds since epoch.\nFetched: {timestamp}")
elif TIME_METHOD == 'Timestamp-Milliseconds':
if not timestamp.isdigit():
return_error(f"The timestamp fetched is not in milliseconds.\nFetched: {timestamp}")
elif len(timestamp) <= timestamp_in_seconds_len:
return_error(f"Fetched timestamp is not in milliseconds since epoch.\nFetched: {timestamp}")
def test_func():
headers = {
'Content-Type': "application/json"
}
if API_KEY_ID:
headers['authorization'] = get_api_key_header_val(API_KEY)
try:
if USERNAME:
res = requests.get(SERVER, auth=(USERNAME, PASSWORD), verify=INSECURE, headers=headers)
else:
res = requests.get(SERVER, verify=INSECURE, headers=headers)
if res.status_code >= 400:
try:
res.raise_for_status()
except requests.exceptions.HTTPError as e:
if HTTP_ERRORS.get(res.status_code) is not None:
# if it is a known http error - get the message form the preset messages
return_error("Failed to connect. "
"The following error occurred: {}".format(HTTP_ERRORS.get(res.status_code)))
else:
# if it is unknown error - get the message from the error itself
return_error("Failed to connect. The following error occurred: {}".format(str(e)))
except requests.exceptions.RequestException as e:
return_error("Failed to connect. Check Server URL field and port number.\nError message: " + str(e))
if demisto.params().get('isFetch'):
# check the existence of all necessary fields for fetch
fetch_params_check()
try:
# build general Elasticsearch class
es = elasticsearch_builder()
# test if FETCH_INDEX exists
test_general_query(es)
# test if TIME_FIELD in index exists
response = test_time_field_query(es)
# try to get response from FETCH_QUERY - if exists check the time field from that query
temp = test_fetch_query(es)
if temp:
response = temp
# get the value in the time field
hit_date = str(response.get('hits', {}).get('hits')[0].get('_source').get(str(TIME_FIELD)))
# if not a timestamp test the conversion to datetime object
if 'Timestamp' not in TIME_METHOD:
parse(str(hit_date))
# test timestamp format and conversion to date
else:
test_timestamp_format(hit_date)
timestamp_to_date(hit_date)
except ValueError as e:
return_error("Inserted time format is incorrect.\n" + str(e) + '\n' + TIME_FIELD + ' fetched: ' + hit_date)
demisto.results('ok')
def incident_label_maker(source):
"""Creates labels for the created incident.
Args:
source(dict): the _source fields of a hit.
Returns:
(list).The labels.
"""
labels = []
for field in source.keys():
labels.append({'type': str(field), 'value': str(source.get(field))})
return labels
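# Illustrative call (hypothetical _source document, not from a real index):
#     incident_label_maker({'host': 'srv1', 'severity': 3})
#     ->  [{'type': 'host', 'value': 'srv1'}, {'type': 'severity', 'value': '3'}]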
def results_to_incidents_timestamp(response, last_fetch):
"""Converts the current results into incidents.
Args:
response(dict): the raw search results from Elasticsearch.
last_fetch(num): the date or timestamp of the last fetch before this fetch
- this will hold the last date of the incident brought by this fetch.
Returns:
(list).The incidents.
(num).The date of the last incident brought by this fetch.
"""
current_fetch = last_fetch
incidents = []
for hit in response.get('hits', {}).get('hits'):
if hit.get('_source') is not None and hit.get('_source').get(str(TIME_FIELD)) is not None:
# if timestamp convert to iso format date and save the timestamp
hit_date = timestamp_to_date(str(hit.get('_source')[str(TIME_FIELD)]))
hit_timestamp = int(hit.get('_source')[str(TIME_FIELD)])
if hit_timestamp > last_fetch:
last_fetch = hit_timestamp
# avoid duplication due to weak time query
if hit_timestamp > current_fetch:
inc = {
'name': 'Elasticsearch: Index: ' + str(hit.get('_index')) + ", ID: " + str(hit.get('_id')),
'rawJSON': json.dumps(hit),
'labels': incident_label_maker(hit.get('_source')),
'occurred': hit_date.isoformat() + 'Z'
}
incidents.append(inc)
return incidents, last_fetch
def results_to_incidents_datetime(response, last_fetch):
"""Converts the current results into incidents.
Args:
response(dict): the raw search results from Elasticsearch.
last_fetch(datetime): the date or timestamp of the last fetch before this fetch
- this will hold the last date of the incident brought by this fetch.
Returns:
(list).The incidents.
(datetime).The date of the last incident brought by this fetch.
"""
last_fetch_timestamp = int(last_fetch.timestamp() * 1000)
current_fetch = last_fetch_timestamp
incidents = []
for hit in response.get('hits', {}).get('hits'):
if hit.get('_source') is not None and hit.get('_source').get(str(TIME_FIELD)) is not None:
hit_date = parse(str(hit.get('_source')[str(TIME_FIELD)]))
hit_timestamp = int(hit_date.timestamp() * 1000)
if hit_timestamp > last_fetch_timestamp:
last_fetch = hit_date
last_fetch_timestamp = hit_timestamp
# avoid duplication due to weak time query
if hit_timestamp > current_fetch:
inc = {
'name': 'Elasticsearch: Index: ' + str(hit.get('_index')) + ", ID: " + str(hit.get('_id')),
'rawJSON': json.dumps(hit),
'labels': incident_label_maker(hit.get('_source')),
# parse function returns iso format sometimes as YYYY-MM-DDThh:mm:ss+00:00
# and sometimes as YYYY-MM-DDThh:mm:ss
# we want to return format: YYYY-MM-DDThh:mm:ssZ in our incidents
'occurred': format_to_iso(hit_date.isoformat())
}
incidents.append(inc)
return incidents, format_to_iso(last_fetch.isoformat())
def format_to_iso(date_string):
"""Formatting function to make sure the date string is in YYYY-MM-DDThh:mm:ssZ format.
Args:
        date_string(str): a date string in ISO format, e.g. YYYY-MM-DDThh:mm:ss+00:00 or YYYY-MM-DDThh:mm:ss
Returns:
str. A date string in the format: YYYY-MM-DDThh:mm:ssZ
"""
if len(date_string) > 19 and not date_string.endswith('Z'):
date_string = date_string[:-6]
if not date_string.endswith('Z'):
date_string = date_string + 'Z'
return date_string
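# A minimal sketch of the normalization, assuming the two ISO shapes that
# parse() produces (per the docstring above):
#     format_to_iso('2020-01-02T03:04:05+00:00')  ->  '2020-01-02T03:04:05Z'
#     format_to_iso('2020-01-02T03:04:05')        ->  '2020-01-02T03:04:05Z'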
def fetch_incidents():
last_run = demisto.getLastRun()
last_fetch = last_run.get('time')
# handle first time fetch
if last_fetch is None:
last_fetch, _ = parse_date_range(date_range=FETCH_TIME, date_format='%Y-%m-%dT%H:%M:%S.%f', utc=False,
to_timestamp=False)
last_fetch = parse(str(last_fetch))
last_fetch_timestamp = int(last_fetch.timestamp() * 1000)
# if timestamp: get the last fetch to the correct format of timestamp
if 'Timestamp' in TIME_METHOD:
last_fetch = get_timestamp_first_fetch(last_fetch)
last_fetch_timestamp = last_fetch
# if method is simple date - convert the date string to datetime
elif 'Simple-Date' == TIME_METHOD:
last_fetch = parse(str(last_fetch))
last_fetch_timestamp = int(last_fetch.timestamp() * 1000)
    # if last_fetch is already set and we are in a "Timestamp" method, then last_fetch_timestamp is simply last_fetch.
else:
last_fetch_timestamp = last_fetch
es = elasticsearch_builder()
query = QueryString(query=FETCH_QUERY + " AND " + TIME_FIELD + ":*")
    # Elasticsearch can use epoch timestamps (in milliseconds) as date representation regardless of date format.
search = Search(using=es, index=FETCH_INDEX).filter({'range': {TIME_FIELD: {'gt': last_fetch_timestamp}}})
search = search.sort({TIME_FIELD: {'order': 'asc'}})[0:FETCH_SIZE].query(query)
response = search.execute().to_dict()
_, total_results = get_total_results(response)
incidents = [] # type: List
if total_results > 0:
if 'Timestamp' in TIME_METHOD:
incidents, last_fetch = results_to_incidents_timestamp(response, last_fetch)
demisto.setLastRun({'time': last_fetch})
else:
incidents, last_fetch = results_to_incidents_datetime(response, last_fetch)
demisto.setLastRun({'time': str(last_fetch)})
    demisto.info('extracted {} incidents'.format(len(incidents)))
demisto.incidents(incidents)
def parse_subtree(my_map):
"""
param: my_map - tree element for the schema
return: tree elements under each branch
"""
# Recursive search in order to retrieve the elements under the branches in the schema
res = {}
for k in my_map:
if 'properties' in my_map[k]:
res[k] = parse_subtree(my_map[k]['properties'])
else:
res[k] = "type: " + my_map[k].get('type', "")
return res
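# Illustrative call (hypothetical mapping fragment, not from a real index):
#     parse_subtree({'user': {'properties': {'name': {'type': 'text'}}},
#                    'age': {'type': 'long'}})
#     ->  {'user': {'name': 'type: text'}, 'age': 'type: long'}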
def get_mapping_fields_command():
"""
Maps a schema from a given index
return: Elasticsearch schema structure
"""
indexes = FETCH_INDEX.split(',')
elastic_mapping = {}
for index in indexes:
res = requests.get(SERVER + '/' + index + '/_mapping', auth=(USERNAME, PASSWORD), verify=INSECURE)
my_map = res.json()[index]['mappings']['properties']
elastic_mapping[index] = {"_id": "doc_id", "_index": index}
elastic_mapping[index]["_source"] = parse_subtree(my_map)
demisto.results(elastic_mapping)
def main():
try:
LOG('command is %s' % (demisto.command(),))
if demisto.command() == 'test-module':
test_func()
elif demisto.command() == 'fetch-incidents':
fetch_incidents()
elif demisto.command() in ['search', 'es-search']:
search_command()
elif demisto.command() == 'get-mapping-fields':
get_mapping_fields_command()
except Exception as e:
return_error("Failed executing {}.\nError message: {}".format(demisto.command(), str(e)), error=e)
main()
| 37.805296 | 119 | 0.64781 |
9b2c51e29299a52c7d7b62eb4bf413c8c617f17a | 377 | py | Python | examples/projects/store_demoqa/tests/header/search_for_a_product.py | vault-the/golem | 3bd132685b148c0d9c12deeebfc00569d07063e4 | ["MIT"] | stars: null | issues: null | forks: null
description = ''
pages = ['header',
'checkout',
'search_result']
def setup(data):
pass
def test(data):
navigate('http://store.demoqa.com/')
send_keys(header.search_input, 'mouse')
press_key(header.search_input, 'ENTER')
search_result.verify_product_in_results('Magic Mouse')
capture('search result')
def teardown(data):
pass
| 17.952381 | 58 | 0.65252 |
edcb04f43c91df6250a54cfcae424cd9a7bba3f9 | 11503 | py | Python | nrfjprog/model/perform_command_jlink.py | strobo-inc/nrfjprog | 3c5667959948081aba44ca8981544a9df0dffae6 | ["BSD-3-Clause"] | stars: null | issues: null | forks: null
# Copyright (c) 2016, Nordic Semiconductor
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of Nordic Semiconductor ASA nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from pynrfjprog import API
from nrfjprog import nrfjprog_version
from nrfjprog.model import device
from nrfjprog.model.perform_command import PerformCommand
class SetupCommand(object):
"""
Class that handles the pynrfjprog api instance, some shared arguments, and logging.
"""
DEFAULT_JLINK_SPEED_KHZ = 5000
def __init__(self, args, do_not_initialize_api=False):
"""
Initialize the class's properties, sets up the connection to our target device, and configures some logging options.
:param Object args: Arguments the command was called with.
:param bool do_not_initialize_api: If api should be initialized (the connection to the target device should be set up - a command may not need to connect to the target device).
"""
self.args = args
self.api = None
self.device = None
self.device_version = None
if not do_not_initialize_api:
if self._setup('NRF52'):
pass
elif self._setup('NRF51'):
pass
else:
assert(False), 'Unknown device family.'
def cleanup(self):
"""
Disconnect from the emulator (debugger) and close the pynrfjprog api instance.
"""
self.api.disconnect_from_emu()
self.api.close()
self.api = None
self.device = None
self.device_version = None
def connect_to_emu(self, api):
"""
This method should only be called when this class is created with the do_not_initialize_api flag (i.e. called by recover()).
:param API api: An instance of api that has been initialized by the caller.
"""
assert (self.api is None), "The class's api property has already been initialized."
self.api = api
self._connect_to_emu()
def _connect_to_emu(self):
"""
Connect to the emulator (debugger) with the specific serial number and/or clock speed if either was specified in the command-line arguments.
"""
if self.args.snr and self.args.clockspeed:
self.api.connect_to_emu_with_snr(self.args.snr, self.args.clockspeed)
elif self.args.snr:
self.api.connect_to_emu_with_snr(self.args.snr, self.DEFAULT_JLINK_SPEED_KHZ)
elif self.args.clockspeed:
self.api.connect_to_emu_without_snr(self.args.clockspeed)
else:
self.api.connect_to_emu_without_snr(self.DEFAULT_JLINK_SPEED_KHZ)
def _setup(self, device_family_guess):
"""
Connect to target device and check if device_family_guess is correct. If correct, initialize api and device_version and return True. Else, cleanup and return False.
:param String device_family_guess: The device family type to try.
:return Boolean: If device_family_guess was correct and we initialized everything successfully.
"""
self.api = API.API(device_family_guess)
self.api.open()
self._connect_to_emu()
if not self.args.deviceversion:
try:
self.device_version = self.api.read_device_version()
except API.APIError as error:
if error.err_code == API.NrfjprogdllErr.WRONG_FAMILY_FOR_DEVICE:
self.cleanup()
return False
else:
                    assert(False), 'Unexpected error while reading the device version.'
else:
self.device_version = self.args.deviceversion
self.device = device.NRF5xDevice(self.device_version)
return True
class JLink(PerformCommand):
"""
"""
def erase(self, args):
nrf = SetupCommand(args)
if args.erasepage:
nrf.api.erase_page(args.erasepage)
elif args.eraseuicr:
nrf.api.erase_uicr()
else:
nrf.api.erase_all()
nrf.cleanup()
def halt(self, args):
nrf = SetupCommand(args)
nrf.api.halt()
nrf.cleanup()
def ids(self, args):
nrf = SetupCommand(args, do_not_initialize_api=True)
api = API.API('NRF52') # Device family type arbitrary since we are not connecting to a device. Use NRF52 by default.
api.open()
ids = api.enum_emu_snr()
if ids:
print(sorted(ids))
api.close()
def memrd(self, args):
nrf = SetupCommand(args)
data = nrf.api.read(args.addr, args.length)
self.output_data(args.addr, data)
nrf.cleanup()
def memwr(self, args):
nrf = SetupCommand(args)
nrf.api.write_u32(args.addr, args.val, self.is_flash_addr(args.addr, nrf.device))
nrf.cleanup()
def pinresetenable(self, args):
nrf = SetupCommand(args)
assert(nrf.device_version[:5] != 'NRF51'), "Enabling pin reset is not a valid command for nRF51 devices."
uicr_pselreset0_addr = 0x10001200
uicr_pselreset1_addr = 0x10001204
uicr_pselreset_21_connect = 0x15 # Writes the CONNECT and PIN bit fields (reset is connected and GPIO pin 21 is selected as the reset pin).
nrf.api.write_u32(uicr_pselreset0_addr, uicr_pselreset_21_connect, True)
nrf.api.write_u32(uicr_pselreset1_addr, uicr_pselreset_21_connect, True)
nrf.api.sys_reset()
nrf.cleanup()
def program(self, args):
from intelhex import IntelHex
nrf = SetupCommand(args)
if args.eraseall:
nrf.api.erase_all()
if args.sectorsanduicrerase:
nrf.api.erase_uicr()
hex_file = IntelHex(args.file)
for segment in hex_file.segments():
start_addr, end_addr = segment
size = end_addr - start_addr
if args.sectorserase or args.sectorsanduicrerase:
start_page = int(start_addr / nrf.device.page_size)
end_page = int(end_addr / nrf.device.page_size)
for page in range(start_page, end_page + 1):
nrf.api.erase_page(page * nrf.device.page_size)
            data = hex_file.tobinarray(start=start_addr, size=size)
nrf.api.write(start_addr, data.tolist(), True)
if args.verify:
read_data = nrf.api.read(start_addr, len(data))
assert (self.byte_lists_equal(data, read_data)), 'Verify failed. Data readback from memory does not match data written.'
self._reset(nrf, args)
nrf.cleanup()
def rbp(self, args):
nrf = SetupCommand(args)
if args.rbplevel == 'CR0':
nrf.api.readback_protect(API.ReadbackProtection.REGION_0)
else:
nrf.api.readback_protect(API.ReadbackProtection.ALL)
nrf.cleanup()
def readregs(self, args):
nrf = SetupCommand(args)
for reg in API.CpuRegister:
print('{}: {}'.format(reg.name, hex(nrf.api.read_cpu_register(reg))))
nrf.cleanup()
def readtofile(self, args):
nrf = SetupCommand(args)
try:
with open(args.file, 'w') as file:
if args.readcode or not (args.readuicr or args.readram):
file.write('----------Code FLASH----------\n\n')
self.output_data(nrf.device.flash_start, nrf.api.read(nrf.device.flash_start, nrf.device.flash_size), file)
file.write('\n\n')
if args.readuicr:
file.write('----------UICR----------\n\n')
self.output_data(nrf.device.uicr_start, nrf.api.read(nrf.device.uicr_start, nrf.device.page_size), file)
file.write('\n\n')
if args.readram:
file.write('----------RAM----------\n\n')
self.output_data(nrf.device.ram_start, nrf.api.read(nrf.device.ram_start, nrf.device.ram_size), file)
except IOError as error:
print("{}.".format(error))
nrf.cleanup()
def recover(self, args):
nrf = SetupCommand(args, do_not_initialize_api=True)
api = API.API(args.family)
api.open()
nrf.connect_to_emu(api)
nrf.api.recover()
nrf.cleanup()
def reset(self, args):
nrf = SetupCommand(args)
self._reset(nrf, args, default_sys_reset=True)
nrf.cleanup()
def run(self, args):
nrf = SetupCommand(args)
        if args.pc is not None and args.sp is not None:
            nrf.api.run(args.pc, args.sp)
        elif args.pc is not None or args.sp is not None:
            assert(False), 'Both the PC and the SP must be specified.'
else:
nrf.api.go()
nrf.cleanup()
def verify(self, args):
from intelhex import IntelHex
nrf = SetupCommand(args)
hex_file = IntelHex(args.file)
for segment in hex_file.segments():
start_addr, end_addr = segment
size = end_addr - start_addr
data = hex_file.tobinarray(start=start_addr, size=size)
read_data = nrf.api.read(start_addr, size)
assert (self.byte_lists_equal(data, read_data)), 'Verify failed. Data readback from memory does not match data written.'
nrf.cleanup()
def version(self, args):
nrf = SetupCommand(args, do_not_initialize_api=True)
api = API.API('NRF52')
api.open()
jlink_arm_dll_version = api.dll_version()
print('JLink version: {}'.format(jlink_arm_dll_version))
print('nRFjprog version: {}'.format(nrfjprog_version.NRFJPROG_VERSION))
api.close()
# Helper functions.
def _reset(self, nrf, args, default_sys_reset=False):
"""
Reset and run the device.
"""
if args.debugreset:
nrf.api.debug_reset()
elif args.pinreset:
nrf.api.pin_reset()
elif args.systemreset or default_sys_reset:
nrf.api.sys_reset()
else:
return
nrf.api.go()
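# Hedged usage sketch (the argparse wiring here is an assumption; the real CLI
# parser lives elsewhere in nrfjprog). Each JLink method expects an args
# namespace carrying the fields it reads:
#
#     import argparse
#     args = argparse.Namespace(snr=None, clockspeed=None, deviceversion=None,
#                               erasepage=None, eraseuicr=False)
#     JLink().erase(args)   # mass-erases the connected device's flash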
| 34.235119 | 186 | 0.62801 |
67ae2a9e34caa0ead78a1726dfbcf1036eb0204a | 429 | py | Python | Program/storage/entity_storage.py | AntonPashkowskiy/Tars | 07cf1a7eb466c763ffb7ab46cd173d76067fe9d3 | ["MIT"] | stars: null | issues: null | forks: null
#!/usr/bin/python3
from storage.interfaces.entity_storage_interface import EntityStorageInterface
class EntityStorage(EntityStorageInterface):
def __init__(self, target_collection):
self._target_collection = target_collection
    def create(self, entity):
        pass
    def read(self, entity_id):
        pass
    def update(self, entity):
        pass
    def delete(self, entity_id):
        pass
    def read_all(self):
        pass
| 19.5 | 78 | 0.680653 |
747054636644d011a3f3e2735fa8f2617e73d05c | 2085 | py | Python | server/ioctl/app/crud/base.py | amr390/ioctl | ca6bd9bbebcf366c60359a2265b20083be87e48c | ["MIT"] | stars: null | issues: null | forks: null
from typing import Any, Dict, Generic, List, Optional, Type, TypeVar, Union
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel
from sqlalchemy.orm import Session
from app.db.base_class import Base
ModelType = TypeVar("ModelType", bound=Base)
CreateSchemaType = TypeVar("CreateSchemaType", bound=BaseModel)
UpdateSchemaType = TypeVar("UpdateSchemaType", bound=BaseModel)
class CRUDBase(Generic[ModelType, CreateSchemaType, UpdateSchemaType]):
def __init__(self, model: Type[ModelType]):
"""
        CRUD object with default methods to Create, Read, Update, Delete (CRUD)
**Parameters**
* `model`: A SQLALchemy model class
* `schema`: A Pydantic model (schema) class
"""
self.model = model
def get(self, db: Session, id: Any) -> Optional[ModelType]:
return db.query(self.model).filter(self.model.id == id).first()
def get_multi(
self, db: Session, *, skip: int = 0, limit: int = 100
) -> List[ModelType]:
return db.query(self.model).offset(skip).limit(limit).all()
def create(self, db: Session, *, obj_in: CreateSchemaType) -> ModelType:
obj_in_data = jsonable_encoder(obj_in)
db_obj = self.model(**obj_in_data)
db.add(db_obj)
db.commit()
db.refresh(db_obj)
return db_obj
def update(
self,
db: Session,
*,
db_obj: ModelType,
obj_in: Union[UpdateSchemaType, Dict[str, Any]]
) -> ModelType:
obj_data = jsonable_encoder(db_obj)
if isinstance(obj_in, dict):
update_data = obj_in
else:
update_data = obj_in.dict(exclude_unset=True)
for field in obj_data:
if field in update_data:
setattr(db_obj, field, update_data[field])
db.add(db_obj)
db.commit()
db.refresh(db_obj)
        return db_obj
def remove(self, db: Session, *, id: int) -> ModelType:
obj = db.query(self.model).get(id)
db.delete(obj)
db.commit()
return obj
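# Hedged usage sketch (the Item model and schemas below are hypothetical, not
# part of this repository): a concrete CRUD object is built by parameterizing
# CRUDBase with a SQLAlchemy model and its Pydantic create/update schemas.
#
#     from app.models.item import Item                  # assumed module path
#     from app.schemas.item import ItemCreate, ItemUpdate
#
#     class CRUDItem(CRUDBase[Item, ItemCreate, ItemUpdate]):
#         pass
#
#     item = CRUDItem(Item)
#     obj = item.create(db, obj_in=ItemCreate(name="demo"))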
| 30.661765 | 78 | 0.62494 |
6fc0f5058b7906a39e05db39ec9476f51afd4b77 | 11541 | py | Python | Source/fluxes.py | vmmunoza/-HawkHunter | d35d9820df8aa29faa2908d4d1f35204b7c0edf9 | ["MIT"] | stars: 1 (2022-03-25) | issues: null | forks: 3 (2022-03-30)
#-------------------------------
# Compute the neutrino fluxes
#-------------------------------
from Source.evaporation import *
#-------
# Generic stuff
#-------
# Number density of PBHs n_pbh at z = 0 (cm^-3)
# beta and f_pbh defined outside of the function (i.e., f_PBH=1 or beta'=1 for this formula, scale it later)
def n_pbh(Mpbh, as_DM, mass_spec, sig=0):
# For monochromatic
if mass_spec==0:
if as_DM:
return Om_dm*rho_c/Mpbh
else:
# See Carr 2010
return (GpToCm)**(-3.)/(7.98e-29)*(Mpbh/Msun)**(-3./2.)
# For lognormal, employ normalization such that BlackHawk parameter amplitude_lognormal = 1
elif mass_spec==1:
Mmean = Mpbh*np.exp(sig**2./2.)
if as_DM:
return Om_dm*rho_c/Mmean
else:
return (GpToCm)**(-3.)/(7.98e-29)*(Mpbh/Msun)**(-3./2.)*np.exp(-9./8.*sig**2.)
# Modify the flux for each flavour due to neutrino oscillations (oscillation angles from 2006.11237)
def flux_oscillations(F_nue, F_numu, F_nutau):
Fprime_nue = 0.542396*F_nue + 0.176253*F_numu + 0.281351*F_nutau
Fprime_numu = 0.176253*F_nue + 0.451522*F_numu + 0.372224*F_nutau
Fprime_nutau = 0.281351*F_nue + 0.372224*F_numu + 0.346425*F_nutau
return Fprime_nue, Fprime_numu, Fprime_nutau
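# Quick self-check of the mixing coefficients above (np is assumed to come
# from the star import of Source.evaporation, which this module already relies
# on): each flavour column should sum to ~1 so the mixing conserves total flux.
if __name__ == '__main__':
    _P = np.array([[0.542396, 0.176253, 0.281351],
                   [0.176253, 0.451522, 0.372224],
                   [0.281351, 0.372224, 0.346425]])
    assert np.allclose(_P.sum(axis=0), 1.0, atol=1e-4)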
# Suffix for text files, depending on whether the mass function is monochromatic (mc) or lognormal (ln), specifying sigma in the latter case
def sufix(mass_spec, sig = 0):
if mass_spec==0:
sufix = "_mc.txt"
elif mass_spec==1:
sufix = "_ln_sig_{:.1e}.txt".format(sig)
return sufix
#-------
# Galactic component (from 2010.16053)
#-------
# Some NFW parameters
r_dot = 8.5 # kpc
r_h = 200. # kpc
r_s = 20. # kpc
rhocentral = 0.4 # GeV/cm^3
# Radial distance as a function of the cosine and line-of-sight distance
def galactocentric_d(l, cos):
return np.sqrt(r_dot**2.-2.*r_dot*l*cos+l**2.)
# NFW profile
def NFW_profile(l,cos):
r = galactocentric_d(l, cos)
return rhocentral*(r_dot/r)*((1.+r_dot/r_s)/(1.+r/r_s))**2.
# Maximum line-of-sight distance
def l_max(cos):
return np.sqrt(r_h**2. - r_dot**2.*(1.-cos**2.)) + r_dot*cos
# D-factor, \int dl \rho_NFW(r(l))
def D_factor():
# First integrate in l:
cos_vec=np.linspace(-1.,1.,100)
Integral_l=[]
for cos in cos_vec:
lvec = np.linspace(0., l_max(cos), 100)
I = integrate.simps(NFW_profile(lvec, cos), lvec)
Integral_l.append(I)
# Now integrate in cosine:
Galactic_contrib = integrate.simps( Integral_l, cos_vec)/2. # (factor of 2 comes from 2pi of integral over 4pi factor of Eq 4.)
# We must add a kpc to cm factor to match units of flux:
return MpcToCm*1.e-3*Galactic_contrib/gr_to_GeV # This has units of g/cm^2
galacticfactor = D_factor()
# Galactic flux (f_PBH=1, scale it afterwards)
def galactic_flux(Mpbh, energyrate, mass_spec, sig=0):
if mass_spec==0:
galflux = energyrate*galacticfactor/Mpbh
elif mass_spec==1:
Mmean = Mpbh*np.exp(sig**2./2.)
galflux = energyrate*galacticfactor/Mmean
return galflux
#-------
# Extragalactic flux (only for instantaneous)
#-------
# Compute the diffuse flux
def flux(zmin, zmax, Mpbh, E_vec, spec_int, as_DM, mass_spec, sig=0, aprox=0):
flux_vec = []
logonepluszz = np.linspace( np.log(1.+zmin), np.log(1.+zmax) , 100)
onepluszz = np.exp(logonepluszz)
for E in E_vec:
if aprox:
rate = blackbody(E*onepluszz, Mpbh)
else:
rate = dNdEdt_extension(E_vec[-1],spec_int,E*onepluszz,Mpbh)
integral = integrate.simps( dtdz(onepluszz-1.)*rate*onepluszz*onepluszz, logonepluszz )
flux_vec.append( n_pbh(Mpbh, as_DM, mass_spec, sig)*integral*c )
return np.array(flux_vec)
# Compute the approximated flux (not employed, does not work well, it has to be revised)
def flux_approx(zmin, zmax, Mpbh, E, as_DM, mass_spec, sig=0):
if E>4.*Tpbh(Mpbh):
integral = blackbody(E, Mpbh)*dtdz(0.)
else:
logonepluszz = np.linspace( np.log(1.+zmin), np.log(1.+zmax) , 100)
onepluszz = np.exp(logonepluszz)
#integral = integrate.simps( dtdz(onepluszz-1.)*rate*onepluszz*onepluszz, logonepluszz )
integral = E**2.*27.*np.pi*G_N**2.*Mpbh**2.*integrate.simps( dtdz(onepluszz-1.)*onepluszz**2.*onepluszz*onepluszz, logonepluszz )
return n_pbh(Mpbh, as_DM, mass_spec, sig)*integral*c
flux_approx = np.vectorize( flux_approx )
#-------
# Routine to compute fluxes for a range of PBH masses
#-------
# as_DM: 1 for PBHs as DM and use f_PBH, otherwise, it uses beta'
# mass_spec: mass spectrum, 0 for monochromatic, 1 for lognormal
# sig: variance for lognormal mass spectrum (only used if mass_spec=1)
# use_inst: if 1, use instantaneous Blackhawk tables (not recommended for masses <2e15), otherwise it employs total tables
# flux normalization assumes fpbh=1 (for PBHs as DM, as_DM=1) or beta'=1 (for as_DM=0)
def compute_flux(Mpbhs, as_DM, mass_spec = 0, sig = 0, use_inst = 0):
sufx = sufix(mass_spec, sig)
# This will be filled by PBH mass, energy and flux, not used in further computations of this code, for exporting only
onefile = []
for mm, Mpbh in enumerate(Mpbhs):
print("Mass: {:.1e} g".format( Mpbh ) )
folder = folder_blackhawk+"{:.1e}/".format(Mpbh)
if not os.path.exists("fluxes/{:.1e}".format(Mpbh)):
os.system("mkdir "+"fluxes/{:.1e}".format(Mpbh))
if use_inst:
#---------------
# Instantaneous spectra
#---------------
data_primary = np.genfromtxt(folder+"instantaneous_primary_spectra"+sufx, skip_header = 2)
data_secondary = np.genfromtxt(folder+"instantaneous_secondary_spectra"+sufx, skip_header = 2)
E_prim = data_primary[:,0]
Evec = data_secondary[:,0]
spec_tot_e, spec_tot_mu, spec_tot_tau = flux_oscillations(data_secondary[:,3], data_secondary[:,4], data_secondary[:,5])
tot_sec = spec_tot_e/2. # 1/2 for taking only neutrino (or antineutrino)
spec_prim = interp1d(E_prim,data_primary[:,6],fill_value="extrapolate")
spec_sec = interp1d(Evec,tot_sec,fill_value="extrapolate")
zmin = max([0., zevap(Mpbh)])
# Take an arbitrary large maximum z
zmax = (1.+zmin)*1.e5 - 1.
flux_prim = flux(zmin, zmax, Mpbh, E_prim, spec_prim, as_DM, mass_spec, sig)/1.e3 # Change units to MeV (factor 1.e3)
flux_sec = flux(zmin, zmax, Mpbh, Evec, spec_sec, as_DM, mass_spec, sig)/1.e3 # Change units to MeV (factor 1.e3)
if Mpbh>=Mevap:
flux_galac = galactic_flux(Mpbh, spec_sec(Evec), mass_spec, sig)/1.e3 # Change units to MeV (factor 1.e3)
flux_sec += flux_galac
# Change units to MeV (factor 1.e3)
Evec = Evec*1.e3
np.savetxt("fluxes/{:.1e}/flux_isDM_{}".format(Mpbh, as_DM)+sufx, np.transpose([Evec, flux_sec]) )
if Mpbh>=Mevap:
np.savetxt("fluxes/{:.1e}/flux_galac_isDM_{}".format(Mpbh, as_DM)+sufx, np.transpose([Evec, flux_galac]) )
np.savetxt("fluxes/{:.1e}/flux_extragalac_isDM_{}".format(Mpbh, as_DM)+sufx, np.transpose([Evec, flux_sec - flux_galac]) )
onefile.extend(list(flux_sec))
# For masses above ~2.e15, instantaneous flux is approx. equal to the total one for monochromatic
else:
#---------------
# Total spectra
#---------------
# Get the neutrino spectra
# Secondary spectra in BlackHawk already include the primary ones
spec_tot_e = np.genfromtxt(folder + "nu_e_secondary_spectrum"+sufx,skip_header = 1)
spec_tot_mu = np.genfromtxt(folder + "nu_mu_secondary_spectrum"+sufx,skip_header = 1)
spec_tot_tau = np.genfromtxt(folder + "nu_tau_secondary_spectrum"+sufx,skip_header = 1)
Evec = spec_tot_e[0,1:]
timevec = spec_tot_e[1:,0]
# Take into account oscillations
spec_tot_e[1:,1:], spec_tot_mu[1:,1:], spec_tot_tau[1:,1:] = flux_oscillations(spec_tot_e[1:,1:], spec_tot_mu[1:,1:], spec_tot_tau[1:,1:])
# Consider only electronic neutrinos
spec_tot = spec_tot_e
spec_tot[1:,1:] = spec_tot[1:,1:]/2.# 1/2 for taking only neutrino (or antineutrino)
# BlackHawk files often provide a repeated time at some rows, because the precision at writing the output is not enough
# In such cases, take only the first row among those with the same time value
indexes = []
for it, t in enumerate(timevec[:-2]):
if timevec[it+1]<t: print("Array not sorted")
if timevec[it+1]>ageuniverse:
break
elif not timevec[it+1]==t:
indexes.append( it+1 )
# Compute the redshifted spectrum
d2NdEdt_ts = []
for it in indexes:
time_it = timevec[it]
d2NdEdt = spec_tot[1+it,1:]
# To avoid numerical problems in the interpolation
d2NdEdt[d2NdEdt==0.] = 1.e-300
logd2NdEdt_time = interp1d(np.log(Evec),np.log(d2NdEdt), fill_value=-300, bounds_error=False)
rateredshift = np.exp(logd2NdEdt_time(np.log(Evec*(1.+z_from_t_int(timevec[it])))))
d2NdEdt_ts.append( rateredshift )
d2NdEdt_ts = np.array(d2NdEdt_ts)
timevec = timevec[indexes]
reds = z_from_t_int(timevec)
# Integrate the flux until z=0 or until PBHs are completely evaporated
flux_tot = []
for j, EE in enumerate(Evec):
integrand = d2NdEdt_ts[:timevec.shape[0],j]*(1.+reds)
# Introduce a step function to finish the integral at the current age of the universe
integral = integrate.simps( integrand*timevec*np.heaviside(ageuniverse-timevec,0.), np.log(timevec) )
flux_tot.append( n_pbh(Mpbh, as_DM, mass_spec, sig)*integral*c )
# Change units to MeV (factor 1.e3)
Evec, flux_tot = Evec*1.e3, np.array(flux_tot)/1.e3
# If PBHS are DM, include galactic contribution
if Mpbh>=Mevap:
# Find the spectrum evaluated at the current age of the universe
ind = find_nearest(spec_tot[1:,0], ageuniverse, axis=0)
spec_tot_today = spec_tot[1+ind,1:]/1.e3 # Change units to MeV (factor 1.e3)
flux_galac = galactic_flux(Mpbh, spec_tot_today, mass_spec, sig)
flux_tot += flux_galac
np.savetxt("fluxes/{:.1e}/flux_isDM_{}".format(Mpbh, as_DM)+sufx, np.transpose([Evec, flux_tot]) )
if Mpbh>=Mevap:
np.savetxt("fluxes/{:.1e}/flux_galac_isDM_{}".format(Mpbh, as_DM)+sufx, np.transpose([Evec, flux_galac]) )
np.savetxt("fluxes/{:.1e}/flux_extragalac_isDM_{}".format(Mpbh, as_DM)+sufx, np.transpose([Evec, flux_tot - flux_galac]) )
onefile.extend(list(flux_tot))
masses = []
for Mpbh in Mpbhs:
masses.extend(list(np.tile(Mpbh, len(Evec))))
# File with PBH mass, energy and flux, not used in this code, for exporting only
np.savetxt("fluxes/totalflux_Mpbh_from_{:.1e}_to_{:.1e}".format(Mpbhs[0], Mpbhs[-1])+sufx, np.transpose([np.array(masses), np.tile(Evec, len(Mpbhs)), np.array(onefile)]) )
| 41.66426 | 175 | 0.616844 |
5f09be7991c6fb90c60a9464eb765c758129ef52 | 821 | py | Python | src/Day30_MinimizeDeviationInArray.py | ruarfff/leetcode-jan-2021 | 9436c0d6b82e83c0b21a498c998fa9e41d443d3c | ["MIT"] | stars: null | issues: null | forks: null
from typing import List
import heapq
import math
class Solution:
def minimumDeviation(self, nums: List[int]) -> int:
vals = []
minimum = math.inf
minimum_deviation = math.inf
for n in nums:
if n % 2 == 0:
vals.append(-n)
minimum = min(minimum, n)
else:
evened = n * 2
vals.append(-evened)
minimum = min(minimum, evened)
heapq.heapify(vals)
while vals:
e = -heapq.heappop(vals)
minimum_deviation = min(minimum_deviation, e - minimum)
if e % 2 != 0:
return minimum_deviation
e = e // 2
minimum = min(minimum, e)
heapq.heappush(vals, -e)
return minimum_deviation
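# Hedged usage sketch (the inputs are the examples from the original LeetCode
# problem statement, not from this repository):
if __name__ == '__main__':
    solution = Solution()
    print(solution.minimumDeviation([1, 2, 3, 4]))      # expected: 1
    print(solution.minimumDeviation([4, 1, 5, 20, 3]))  # expected: 3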
| 24.878788 | 67 | 0.493301 |
77729546eba505d873b63149ce257117ad211fcc | 1567 | py | Python | config.py | KacperMayday/Snake-Genetic-Algorithm | dfb5bd20b06e82e2646b7843a439d67cca1dd957 | ["MIT"] | stars: 1 (2020-03-14) | issues: null | forks: null
"""Configuration file containing global variables and hyperparameters.
Constants
---------
SCREENWIDTH, SCREENHEIGHT : int
    Screen resolution parameters. For training purposes I recommend setting
    values as small as possible to make the process faster.
TICKRATE : int
    Indicates maximum frames per second. For training, set to a very high value;
    otherwise 30-60 is recommended.
VELOCITY : int
Number of pixels snake moves each frame.
SIZE : int
Size in pixels of apple and every snake's segment. Size is equal to velocity
to achieve grid effect.
POPULATION_SIZE : int
Size of population in each generation. Used in genetic algorithm.
PARENTS_SIZE : int
Number of best parents chosen from each population. Must be even and a divisor
of population_size.
IDLE_TIME : int
    Time in milliseconds allowed per apple catch. Prevents snakes from looping forever.
MUTATION_RATE : int
    Maximum per-mille change applied during mutation.
MUTATION_FREQUENCY : int
    Mutation frequency in percent.
CROSSING_PROBABILITY : int
Probability of exchanging each chromosome with second parent during crossing_over stage.
EPOCHS : int
Number of epochs during training. Set to 1 if you want to enter show mode.
WIN_MAX, WIN_MEAN : int
Winning conditions, program stops upon reaching them.
"""
SCREENWIDTH = 50 * 12
SCREENHEIGHT = 50 * 12
TICKRATE = 30
VELOCITY = 1 * 12
SIZE = VELOCITY
POPULATION_SIZE = 100
PARENTS_SIZE = 10
IDLE_TIME = 2000 * 3
MUTATION_RATE = 50
MUTATION_FREQUENCY = 25
CROSSING_PROBABILITY = 10
EPOCHS = 1
WIN_MAX = 75
WIN_MEAN = 50
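# Derived geometry, for reference: with SIZE == VELOCITY == 12 the board is a
# grid of SCREENWIDTH // SIZE == 50 by SCREENHEIGHT // SIZE == 50 cells, so the
# snake always moves exactly one cell per frame.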
| 31.979592 | 92 | 0.760689 |
43c05848396c01f1e695abdf3f05d7c4ac6bf6bc | 200 | py | Python | aug/ops/__init__.py | cta-ai/aug (stars) / tgilewicz/aug (issues, forks) | b4efc82874653e2cd0f6b48426568108ee40afd0 | ["Apache-2.0"] | stars: 7 (2019-07-15 to 2019-09-25) | issues: null | forks: 3 (2020-06-30 to 2021-11-04)
from os.path import dirname, basename, isfile
import glob
modules = glob.glob(dirname(__file__) + "/*.py")
__all__ = [basename(f)[:-3] for f in modules if isfile(f) and not f.endswith('__init__.py')]
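# Effect sketch (module names are hypothetical): with aug/ops/foo.py and
# aug/ops/bar.py on disk, __all__ becomes ['foo', 'bar'], so
# `from aug.ops import *` re-exports every sibling module except __init__.py.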
| 40 | 92 | 0.715 |
0159d53a180aa8c00ad4d6f2db9467f34e56b66a | 876 | py | Python | day9.py | kdrag0n/aoc2021 | 469bd861a7d7c0add14412a705ec4cb1e1b5a10f | ["MIT"] | stars: 2 (2021-12-04 to 2021-12-12) | issues: null | forks: null
#!/usr/bin/env python3
import sys
def ints(itr):
return [int(i) for i in itr]
with open(sys.argv[1], 'r') as f:
file_lines = [l for l in f.read().strip().split('\n')]
in_nums = []
total = 0
result = 0
other = 0
grid=[]
for l in file_lines:
    vals = [int(x) for x in list(l)]
    grid += [vals]
lows = []
for y, row in enumerate(grid):
    for x, cell in enumerate(row):
        pts = [
            (x, y-1),
            (x, y+1),
            (x-1, y),
            (x+1, y),
        ]
        adjs = []
        # use distinct names so the outer x/y grid indices are not clobbered
        for px, py in pts:
            if 0 <= px < len(row) and 0 <= py < len(grid):
                adjs += [grid[py][px]]
        if all(cell < adj for adj in adjs):
            lows += [cell]
print(lows)
print(sum(x+1 for x in lows))
print(f'Total: {total}')
print(f'Result: {result}')
print(f'Other: {other}')
| 18.25 | 68 | 0.481735 |
614c98401e90ebb88ad490d19c083574eeab8b14 | 22837 | py | Python | fsm.py | jwang0306/NBA-info-bot (stars) / Jens0306/NBA_and_Movie_Bot (issues, forks) | 4765900043ba4f4481338f865ef2d598462dc558 | ["MIT"] | stars: 1 (2018-12-19) | issues: 4 (2020-03-24 to 2021-12-13) | forks: null
from transitions.extensions import GraphMachine
from utils import *
from web_scraper import *
import nltk
from nltk.tokenize import sent_tokenize
from nltk.tokenize import word_tokenize
import random
mytext = "Oh yeah lakers!"
print("================================")
# print(word_tokenize(mytext))
print("================================")
dataInfo = NBA_today()
currentYear = str(dataInfo[0])
currentDate = str(dataInfo[1])
# currentDate = "20181207"
all_teams = []
all_teams = list(NBA_team(currentYear))
# imgUrls = moviePoster(1)
imgUrls = latestPoster()
# print(imgUrls)
class TocMachine(GraphMachine):
def __init__(self, **machine_configs):
self.machine = GraphMachine(
model=self,
**machine_configs
)
#====================================== Conditions ======================================
# # input s to start
# def is_going_to_start(self, event):
# if event.get("postback"):
# text = event['postback']['title']
# return text.lower() == 's'
# elif event.get("message"):
# text = event['message']['text']
# return text.lower() == 's'
# return False
# input help to see how to play with it
def is_going_to_help(self, event):
if event.get("postback"):
text = event['postback']['title']
return text.lower() == 'help'
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if "help" in text:
return True
return False
def is_going_to_hello(self, event):
if event.get("postback"):
text = event['postback']['title']
# return text.lower() == 'nba today'
return text.lower() == "hello"
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if "hello" in text:
return True
return False
# input s to start (NBA today)
def is_going_to_nbaToday(self, event):
if event.get("postback"):
text = event['postback']['title']
# return text.lower() == 'nba today'
return text.lower() == "start"
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if "start" in text:
return True
return False
def is_going_to_moviePics(self, event):
if event.get("postback"):
text = event['postback']['title']
return text.lower() == 'movie'
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if "movie" in text:
return True
return False
def is_going_to_nbaStatus(self, event):
if event.get("postback"):
text = event['postback']['title']
return text.lower() == 'nba stats'
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if ("stats" in text or "status" in text):
return True
return False
def is_going_to_nbaStandings(self, event):
if event.get("postback"):
text = event['postback']['title']
return text.lower() == 'standings'
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if ("standings" in text or "standing" in text):
return True
return False
def is_going_to_confStandings(self, event):
if event.get("postback"):
text = event['postback']['title']
return text.lower() == 'conference'
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if ("conference" in text):
return True
return False
def is_going_to_divStandings(self, event):
if event.get("postback"):
text = event['postback']['title']
return text.lower() == 'division'
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if ("division" in text):
return True
return False
def is_going_to_playerInfo(self, event):
if event.get("postback"):
text = event['postback']['title']
return text.lower() == 'players info'
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if ("players" in text or "player" in text):
return True
return False
def is_going_to_pickDivision(self, event):
if event.get("postback"):
text = event['postback']['title']
return (text.lower() == 'east' or text.lower() == 'west')
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if "east" in text or "west" in text:
return True
return False
def is_going_to_teams(self, event):
if event.get("postback"):
text = event['postback']['title']
return (text.lower() == 'atlantic' or text.lower() == 'central' or text.lower() == 'southeast' or text.lower() == 'southwest' or text.lower() == 'northwest' or text.lower() == 'pacific')
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if ("atlantic" in text or "central" in text or "southeast" in text or "southwest" in text or "northwest" in text or "pacific" in text):
return True
return False
def is_going_to_playerPpg(self, event):
if event.get("postback"):
text = event['postback']['title']
text = text.lower()
if (text in all_teams):
return True
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
for word in text:
if (word in all_teams):
return True
return False
def is_going_to_nbaGames(self, event):
if event.get("postback"):
text = event['postback']['title']
return text.lower() == 'nba games'
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if "game" in text or "games" in text:
return True
return False
def is_going_to_boxScore(self, event):
if event.get("postback"):
text = event['postback']['title']
return text.lower() == 'box score'
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if "box" in text or "boxscore" in text:
return True
return False
def is_going_to_nbaNews(self, event):
if event.get("postback"):
text = event['postback']['title']
return text.lower() == 'nba news'
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if "news" in text or "headline" in text:
return True
return False
def go_back_to_start(self, event):
sender_id = event['sender']['id']
data = "Welcome home."
if event.get("postback"):
text = event['postback']['title']
response = send_text_message(sender_id, data)
return text.lower() == 'home'
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if "home" in text:
response = send_text_message(sender_id, data)
return True
return False
def go_back_to_nbaToday(self, event):
if event.get("postback"):
text = event['postback']['title']
return text.lower() == 'more nba'
elif event.get("message"):
text = event['message']['text']
text = word_tokenize(text.lower())
if "more" in text and "nba" in text:
return True
return False
#===================================== actions =======================================
# def on_enter_start(self, event):
# print("==========================")
# print("Start Playing")
# print("==========================")
# sender_id = event['sender']['id']
# text = "What can I do for you?"
# quick_replies = [
# {
# "content_type": "text",
# "title": "NBA TODAY",
# "payload": "NBA TODAY"
# },
# {
# "content_type": "text",
# "title": "Search Words",
# "payload": "Search Words"
# }
# ]
# response = quick_reply_message(sender_id, text, quick_replies)
def on_enter_hello(self, event):
sender_id = event['sender']['id']
text = "My name is Jared"
response = send_text_message(sender_id, text)
self.go_back()
def on_enter_help(self, event):
print("==========================")
print("Start Playing")
print("==========================")
sender_id = event['sender']['id']
text = "Enter 'Start' to see NBA stuff.\nEnter 'Help' to see the usage.\nEnter 'Movie' to see top 15 movies' poster"
quick_replies = [
{
"content_type": "text",
"title": "Start",
"payload": "Start"
},
{
"content_type": "text",
"title": "Help",
"payload": "Help"
},
{
"content_type": "text",
"title": "Movie",
"payload": "Movie"
}
]
response = quick_reply_message(sender_id, text, quick_replies)
self.go_back()
def on_enter_moviePics(self, event):
print("==========================")
print("Movie Pictures")
print("==========================")
sender_id = event['sender']['id']
index = random.randint(0, len(imgUrls)-1)
        response = send_image_message(sender_id, imgUrls[index])
        # for img in imgUrls:
        #     response = send_image_message(sender_id, img)
self.go_back()
def on_enter_nbaToday(self, event):
print("==========================")
print("More NBA Options")
print("==========================")
sender_id = event['sender']['id']
title="NBA TODAY"
image_url="https://i.imgur.com/nWs2EuN.jpg"
subtitle="more options"
data = [
{
"type": "postback",
"title": "NBA Games",
"payload": "NBA Games"
},
{
"type": "postback",
"title": "NBA Stats",
"payload": "NBA Stats"
},
{
"type": "postback",
"title": "NBA News",
"payload": "NBA News"
}
]
response = template_message(sender_id, title, image_url, subtitle, data)
def on_enter_nbaStatus(self, event):
print("==========================")
print("NBA Status")
print("==========================")
sender_id = event['sender']['id']
title="NBA Status"
image_url="https://i.imgur.com/nWs2EuN.jpg"
subtitle="Standings/Players"
data = [
{
"type": "postback",
"title": "Standings",
"payload": "Standings"
},
{
"type": "postback",
"title": "Players Info",
"payload": "Players Info"
}
]
response = template_message(sender_id, title, image_url, subtitle, data)
def on_enter_nbaStandings(self, event):
print("==========================")
print("Standings")
print("==========================")
sender_id = event['sender']['id']
title="Standings"
image_url="https://i.imgur.com/nWs2EuN.jpg"
subtitle="more options"
data = [
{
"type": "postback",
"title": "Conference",
"payload": "Conference"
},
{
"type": "postback",
"title": "Division",
"payload": "Division"
}
]
response = template_message(sender_id, title, image_url, subtitle, data)
def on_enter_confStandings(self, event):
print("==========================")
print("Sort By Conference")
print("==========================")
sender_id = event['sender']['id']
standsList = NBA_standings("conference")
eastStands = standsList[0]
westStands = standsList[1]
response = send_text_message(sender_id, eastStands)
response = send_text_message(sender_id, westStands)
text = "What's next?"
quick_replies = [
{
"content_type": "text",
"title": "More NBA",
"payload": "More NBA"
},
{
"content_type": "text",
"title": "Home",
"payload": "Home"
},
]
response = quick_reply_message(sender_id, text, quick_replies)
def on_enter_divStandings(self, event):
print("==========================")
print("Sort By Division")
print("==========================")
sender_id = event['sender']['id']
standsList = NBA_standings("division")
for stands in standsList:
response = send_text_message(sender_id, stands)
text = "What's next?"
quick_replies = [
{
"content_type": "text",
"title": "More NBA",
"payload": "More NBA"
},
{
"content_type": "text",
"title": "Home",
"payload": "Home"
},
]
response = quick_reply_message(sender_id, text, quick_replies)
def on_enter_playerInfo(self, event):
print("==========================")
print("Player Info")
print("Choose conference")
print("==========================")
sender_id = event['sender']['id']
title="EAST/WEST"
image_url="https://i.imgur.com/nWs2EuN.jpg"
subtitle="Conference"
data = [
{
"type": "postback",
"title": "WEST",
"payload": "WEST"
},
{
"type": "postback",
"title": "EAST",
"payload": "EAST"
}
]
response = template_message(sender_id, title, image_url, subtitle, data)
def on_enter_pickDivision(self, event):
print("==========================")
print("Pick division")
print("==========================")
sender_id = event['sender']['id']
text = "Pick a division"
division = NBA_division(currentYear)
        quick_replies = []
        # accept both postback and plain-text triggers to avoid a KeyError
        payload = event.get('postback', {}).get('payload') or event.get('message', {}).get('text', '')
        if "east" in payload.lower():
print(division['east'])
for e in division['east']:
quick_replies.append(
{
"content_type": "text",
"title": e,
"payload": e
}
)
else:
for e in division['west']:
quick_replies.append(
{
"content_type": "text",
"title": e,
"payload": e
}
)
response = quick_reply_message(sender_id, text, quick_replies)
def on_enter_teams(self, event):
sender_id = event['sender']['id']
text = "Pick a team"
quick_replies = []
print("================================")
print(event['message']['text'].lower())
print("================================")
teams = NBA_division_team(event['message']['text'], currentYear)
for team in teams:
quick_replies.append(
{
"content_type": "text",
"title": team,
"payload": team
}
)
response = quick_reply_message(sender_id, text, quick_replies)
def on_enter_playerPpg(self, event):
print("team info:")
sender_id = event['sender']['id']
        # accept both message and postback triggers to avoid a KeyError
        text = word_tokenize((event.get('message', {}).get('text') or event.get('postback', {}).get('title', '')).lower())
team = ""
for word in text:
if word in all_teams:
team = word
break
print("================================")
print(team)
print("================================")
data = NBA_teamStats(team)
response = send_text_message(sender_id, data)
# quick reply to go back
text = "What's next?"
quick_replies = [
{
"content_type": "text",
"title": "More NBA",
"payload": "More NBA"
},
{
"content_type": "text",
"title": "Home",
"payload": "Home"
},
]
response = quick_reply_message(sender_id, text, quick_replies)
def on_enter_nbaGames(self, event):
print("======================")
print("Games Today")
print("======================")
data = NBA_score(currentDate)
sender_id = event['sender']['id']
response = send_text_message(sender_id, data)
text = "More information?"
quick_replies = [
{
"content_type": "text",
"title": "Box Score",
"payload": "Box Score"
},
{
"content_type": "text",
"title": "More NBA",
"payload": "More NBA"
},
{
"content_type": "text",
"title": "Home",
"payload": "Home"
}
]
response = quick_reply_message(sender_id, text, quick_replies)
def on_enter_boxScore(self, event):
print("======================")
print("Box Score")
print("======================")
sender_id = event['sender']['id']
data = NBA_boxScore(currentDate)
response = send_text_message(sender_id, data)
text = "What's next?"
quick_replies = [
{
"content_type": "text",
"title": "More NBA",
"payload": "More NBA"
},
{
"content_type": "text",
"title": "Home",
"payload": "Home"
}
]
response = quick_reply_message(sender_id, text, quick_replies)
def on_enter_nbaNews(self, event):
print("======================")
print("News")
print("======================")
sender_id = event['sender']['id']
data = NBA_news()
response = send_text_message(sender_id, data)
text = "What's next?"
quick_replies = [
{
"content_type": "text",
"title": "More NBA",
"payload": "More NBA"
},
{
"content_type": "text",
"title": "Home",
"payload": "Home"
}
]
response = quick_reply_message(sender_id, text, quick_replies)
# def on_exit_state1(self):
# print('Leaving state1')
# def on_exit_state2(self):
# print("Leaving state2")
# def on_exit_state3(self):
# print("Leaving state3")
# def on_exit_state1(self):
# print("Exiting state1")
# def is_going_to_state1(self, event):
# if event.get("postback"):
# text = event['postback']['title']
# return text.lower() == 'nba games'
# elif event.get("message"):
# text = event['message']['text']
# return text.lower() == 'nba games'
# return False
# def is_going_to_state2(self, event):
# if event.get("message"):
# text = event['message']['text']
# return text.lower() == '2'
# return False
# def is_going_to_state3(self, event):
# if event.get("message"):
# text = event['message']['text']
# return text.lower() == '3'
# return False
# def on_enter_state1(self, event):
# print("I'm entering state1")
# data = NBA_score()
# # NBA_score()
# sender_id = event['sender']['id']
# responese = send_text_message(sender_id, data)
# self.go_back()
# def on_exit_state1(self):
# print('Leaving state1')
# def on_enter_state2(self, event):
# print("I'm entering state2")
# para = translate()
# sender_id = event['sender']['id']
# responese = send_image_message(sender_id, "https://i.imgur.com/nbPuP6V.jpg")
# self.go_back()
# def on_exit_state2(self):
# print('Leaving state2')
# def on_enter_state3(self, event):
# print("I'm entering state3")
# sender_id = event['sender']['id']
# title="選擇服務"
# image_url="https://i.imgur.com/nbPuP6V.jpg"
# subtitle="請選擇"
# data = [
# {
# "type": "postback",
# "title": "NBA Games",
# "payload": "NBA Games"
# },
# {
# "type": "postback",
# "title": "法文單字查詢",
# "payload": "法文單字查詢"
# },
# {
# "type": "postback",
# "title": "Cafe",
# "payload": "Cafe"
# }
# ]
# responese = template_message(sender_id, title, image_url, subtitle, data)
# self.go_back()
# def on_exit_state3(self):
# print("Leaving state3")
| 33.145138 | 198 | 0.465079 |
ea9334b81ee9177869b41f96fb5540940e2c00aa | 149 | py | Python | tests/parser/builtins.10a.test.py | veltri/DLV2 | 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e | ["Apache-2.0"] | stars: null | issues: null | forks: null
input = """
a("10.28399"). a("3.7").
b(X,Y) :- a(X), a(Y), X > Y.
"""
output = """
a("10.28399"). a("3.7").
b(X,Y) :- a(X), a(Y), X > Y.
"""
| 13.545455 | 29 | 0.342282 |
be0ef680258757e68cd5f57d4c0c92c75262bae4 | 14536 | py | Python | venv/lib/python3.5/site-packages/radon/tests/test_cli_tools.py | prashant0598/CoffeeApp | 4fa006aebf06e12ed34766450ddcfa548ee63307 | ["MIT"] | stars: null | issues: null | forks: null
import os
import sys
import json
import unittest
import radon.cli.tools as tools
from radon.visitors import Function, Class
from radon.raw import Module
try:
import unittest.mock as mock
except ImportError:
import mock
from paramunittest import parametrized
def fake_isfile(filename):
if filename == 'file.py':
return True
return False
def fake_walk(start):
dirs = ['tests', 'sub', '.hid']
contents = {'tests': ['test_amod.py', 'run.py', '.hid.py'],
'sub': ['amod.py', 'bmod.py']}
yield '.', dirs, ['tox.ini', 'amod.py', 'test_all.py', 'fake.yp', 'noext']
for d in dirs:
yield './{0}'.format(d), [], contents[d]
class TestGenericTools(unittest.TestCase):
def test_open(self):
with tools._open('-') as fobj:
self.assertTrue(fobj is sys.stdin)
m = mock.mock_open()
with mock.patch('radon.cli.tools.open', m, create=True):
tools._open('randomfile.py').__enter__()
m.assert_called_with('randomfile.py')
class TestIterFilenames(unittest.TestCase):
def setUp(self):
self.iter_files = lambda *a, **kw: list(tools.iter_filenames(*a, **kw))
def assertPEqual(self, a, b):
paths = [list(map(os.path.normpath, p)) for p in (a, b)]
self.assertEqual(*paths)
def test_stdin(self):
self.assertEqual(self.iter_files(['-']), ['-'])
@mock.patch('radon.cli.tools.os.path')
@mock.patch('radon.cli.tools.os')
def test_all(self, os_mod, os_path_mod):
os_path_mod.normpath = os.path.normpath
os_path_mod.basename = os.path.basename
os_path_mod.join = os.path.join
os_path_mod.isfile.side_effect = fake_isfile
os_mod.walk = fake_walk
self.assertPEqual(self.iter_files(['file.py', 'random/path']),
['file.py', 'amod.py', 'test_all.py',
'tests/test_amod.py', 'tests/run.py', 'sub/amod.py',
'sub/bmod.py'])
self.assertPEqual(self.iter_files(['file.py', 'random/path'],
'test_*'),
['file.py', 'amod.py', 'tests/test_amod.py',
'tests/run.py', 'sub/amod.py', 'sub/bmod.py'])
self.assertPEqual(self.iter_files(['file.py', 'random/path'],
'*test_*'),
['file.py', 'amod.py', 'tests/run.py', 'sub/amod.py',
'sub/bmod.py'])
self.assertPEqual(self.iter_files(['file.py', 'random/path'],
'*/test_*,amod*'),
['file.py', 'test_all.py', 'tests/run.py',
'sub/amod.py', 'sub/bmod.py'])
self.assertPEqual(self.iter_files(['file.py', 'random/path'], None,
'tests'),
['file.py', 'amod.py', 'test_all.py', 'sub/amod.py',
'sub/bmod.py'])
self.assertPEqual(self.iter_files(['file.py', 'random/path'], None,
'tests,sub'),
['file.py', 'amod.py', 'test_all.py'])
CC_RESULTS_CASES = [
([
Function('name', 12, 0, 16, False, None, [], 6),
], {
'type': 'function', 'name': 'name', 'lineno': 12, 'col_offset': 0,
'endline': 16, 'closures': [], 'complexity': 6, 'rank': 'B',
}),
([
Class('Classname', 17, 0, 29, [
Function('name', 19, 4, 26, True, 'Classname', [], 7),
], [], 7),
], {
'type': 'class', 'name': 'Classname', 'lineno': 17, 'col_offset': 0,
'endline': 29, 'complexity': 7, 'rank': 'B', 'methods': [
{
'type': 'method', 'lineno': 19, 'col_offset': 4, 'endline': 26,
'closures': [], 'complexity': 7, 'rank': 'B', 'classname':
'Classname', 'name': 'name',
}
],
}),
([
Function('name', 12, 0, 16, False, None, [
Function('aux', 13, 4, 17, False, None, [], 4),
], 10),
], {
'type': 'function', 'name': 'name', 'lineno': 12, 'col_offset': 0,
'endline': 16, 'complexity': 10, 'rank': 'B', 'closures': [
{
'name': 'aux', 'lineno': 13, 'col_offset': 4, 'endline': 17,
'closures': [], 'complexity': 4, 'rank': 'A', 'type':
'function',
}
]
}),
]
@parametrized(*CC_RESULTS_CASES)
class TestCCToDict(unittest.TestCase):
def setParameters(self, blocks, **dict_result):
self.blocks = blocks
self.dict_result = dict_result
def testCCToDict(self):
self.assertEqual(tools.cc_to_dict(self.blocks), self.dict_result)
CC_TO_XML_CASE = [
{'closures': [], 'endline': 16, 'complexity': 6, 'lineno': 12, 'is_method':
False, 'name': 'name', 'col_offset': 0, 'rank': 'B'},
{'complexity': 8, 'endline': 29, 'rank': 'B', 'lineno': 17, 'name':
'Classname', 'col_offset': 0},
{'classname': 'Classname', 'closures': [], 'endline': 26, 'complexity': 7,
'lineno': 19, 'is_method': True, 'name': 'name', 'col_offset': 4,
'rank': 'B'},
{'closures': [], 'endline': 17, 'complexity': 4, 'lineno': 13, 'is_method':
False, 'name': 'aux', 'col_offset': 4, 'rank': 'A'},
{'endline': 16, 'complexity': 10, 'lineno': 12, 'is_method': False, 'name':
'name', 'col_offset': 0, 'rank': 'B'},
]
CC_TO_CODECLIMATE_CASE = [
{'closures': [], 'endline': 16, 'complexity': 6, 'lineno': 12, 'type':
'function', 'name': 'foo', 'col_offset': 0, 'rank': 'B'},
{'complexity': 8, 'endline': 29, 'rank': 'B', 'lineno': 17, 'type': 'class',
'name': 'Classname', 'col_offset': 0},
{'closures': [], 'endline': 17, 'complexity': 4, 'lineno': 13, 'type':
'method', 'name': 'bar', 'col_offset': 4, 'rank': 'A'},
]
class TestDictConversion(unittest.TestCase):
def test_raw_to_dict(self):
self.assertEqual(tools.raw_to_dict(Module(103, 123, 98, 8, 19, 5, 3)),
{'loc': 103, 'lloc': 123, 'sloc': 98, 'comments': 8,
'multi': 19, 'blank': 5, 'single_comments': 3})
def test_cc_to_xml(self):
self.assertEqual(tools.dict_to_xml({'filename': CC_TO_XML_CASE}),
'''<ccm>
<metric>
<complexity>6</complexity>
<unit>name</unit>
<classification>B</classification>
<file>filename</file>
<startLineNumber>12</startLineNumber>
<endLineNumber>16</endLineNumber>
</metric>
<metric>
<complexity>8</complexity>
<unit>Classname</unit>
<classification>B</classification>
<file>filename</file>
<startLineNumber>17</startLineNumber>
<endLineNumber>29</endLineNumber>
</metric>
<metric>
<complexity>7</complexity>
<unit>Classname.name</unit>
<classification>B</classification>
<file>filename</file>
<startLineNumber>19</startLineNumber>
<endLineNumber>26</endLineNumber>
</metric>
<metric>
<complexity>4</complexity>
<unit>aux</unit>
<classification>A</classification>
<file>filename</file>
<startLineNumber>13</startLineNumber>
<endLineNumber>17</endLineNumber>
</metric>
<metric>
<complexity>10</complexity>
<unit>name</unit>
<classification>B</classification>
<file>filename</file>
<startLineNumber>12</startLineNumber>
<endLineNumber>16</endLineNumber>
</metric>
</ccm>'''.replace('\n', '').replace(' ', ''))
def test_cc_error_to_codeclimate(self):
error_result = {
'error': 'Error: invalid syntax (<unknown>, line 100)'
}
expected_results = [
json.dumps({
"description":"Error: Error: invalid syntax (<unknown>, line 100)",
"check_name":"Complexity",
"content": { "body": "We encountered an error attempting to analyze this line." },
"location": { "path": "filename", "lines": {"begin": 100, "end": 100}},
"type":"issue",
"categories": ["Bug Risk"],
"remediation_points": 1000000,
"fingerprint": "10ac332cd7f638664e8865b098a1707c"
}),
]
actual_results = tools.dict_to_codeclimate_issues({"filename": error_result})
actual_sorted = []
for i in actual_results:
actual_sorted.append(json.loads(i))
expected_sorted = []
for i in expected_results:
expected_sorted.append(json.loads(i))
self.assertEqual(actual_sorted, expected_sorted)
def test_cc_to_codeclimate(self):
actual_results = tools.dict_to_codeclimate_issues({'filename': CC_TO_CODECLIMATE_CASE})
expected_results = [
json.dumps({
"description":"Cyclomatic complexity is too high in function foo. (6)",
"check_name":"Complexity",
"content": { "body": tools.get_content()},
"location": { "path": "filename", "lines": {"begin": 12, "end": 16}},
"type":"issue",
"categories": ["Complexity"],
"remediation_points": 1100000,
"fingerprint": "afbe2b8d9a57fde5f3235ec97e7a22e1"
}),
json.dumps({
"description":"Cyclomatic complexity is too high in class Classname. (8)",
"check_name":"Complexity",
"content": {"body": tools.get_content()},
"location": {"path": "filename", "lines": {"begin": 17, "end": 29}},
"type":"issue",
"categories": ["Complexity"],
"remediation_points": 1300000,
"fingerprint": "8caecbb525375d825b95c23bc8f881d7"
}),
]
actual_sorted = []
for i in actual_results:
actual_sorted.append(json.loads(i))
expected_sorted = []
for i in expected_results:
expected_sorted.append(json.loads(i))
self.assertEqual(actual_sorted, expected_sorted)
CC_TO_TERMINAL_CASES = [
Class(name='Classname', lineno=17, col_offset=0, endline=29,
methods=[Function(name='meth', lineno=19, col_offset=4, endline=26,
is_method=True, classname='Classname', closures=[],
complexity=4)],
inner_classes=[], real_complexity=4),
Function(name='meth', lineno=19, col_offset=4, endline=26, is_method=True,
classname='Classname', closures=[], complexity=7),
Function(name='f1', lineno=12, col_offset=0, endline=16, is_method=False,
classname=None, closures=[], complexity=14),
Function(name='f2', lineno=12, col_offset=0, endline=16, is_method=False,
classname=None, closures=[], complexity=22),
Function(name='f3', lineno=12, col_offset=0, endline=16, is_method=False,
classname=None, closures=[], complexity=32),
Function(name='f4', lineno=12, col_offset=0, endline=16, is_method=False,
classname=None, closures=[], complexity=41),
]
class TestCCToTerminal(unittest.TestCase):
def test_cc_to_terminal(self):
# do the patching
tools.LETTERS_COLORS = dict((l, '<!{0}!>'.format(l)) for l in 'FMC')
tools.RANKS_COLORS = dict((r, '<|{0}|>'.format(r)) for r in 'ABCDEF')
tools.BRIGHT = '@'
tools.RESET = '__R__'
results = CC_TO_TERMINAL_CASES
res = [
'@<!C!>C __R__17:0 Classname - <|A|>A (4)__R__',
'@<!M!>M __R__19:4 Classname.meth - <|B|>B (7)__R__',
'@<!F!>F __R__12:0 f1 - <|C|>C (14)__R__',
'@<!F!>F __R__12:0 f2 - <|D|>D (22)__R__',
'@<!F!>F __R__12:0 f3 - <|E|>E (32)__R__',
'@<!F!>F __R__12:0 f4 - <|F|>F (41)__R__',
]
res_noshow = ['{0}__R__'.format(r[:r.index('(') - 1]) for r in res]
self.assertEqual(tools.cc_to_terminal(results, False, 'A', 'F', False),
(res_noshow, 120, 6))
self.assertEqual(tools.cc_to_terminal(results, True, 'A', 'F', False),
(res, 120, 6))
self.assertEqual(tools.cc_to_terminal(results, True, 'A', 'D', False),
(res[:-2], 47, 4))
self.assertEqual(tools.cc_to_terminal(results, False, 'A', 'D', False),
(res_noshow[:-2], 47, 4))
self.assertEqual(tools.cc_to_terminal(results, True, 'C', 'F', False),
(res[2:], 109, 4))
self.assertEqual(tools.cc_to_terminal(results, True, 'B', 'E', False),
(res[1:-1], 75, 4))
self.assertEqual(tools.cc_to_terminal(results, True, 'B', 'F', True),
(res[1:], 120, 6))
| 42.133333
| 114
| 0.476059
|
d4c29269dec60fcf36d30990ccae2bd06791ebc5
| 3,538
|
py
|
Python
|
v1.0.0.test/toontown/coghq/FactoryEntityCreatorAI.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-01T15:46:43.000Z
|
2021-07-23T16:26:48.000Z
|
v1.0.0.test/toontown/coghq/FactoryEntityCreatorAI.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 1
|
2019-06-29T03:40:05.000Z
|
2021-06-13T01:15:16.000Z
|
v1.0.0.test/toontown/coghq/FactoryEntityCreatorAI.py
|
TTOFFLINE-LEAK/ttoffline
|
bb0e91704a755d34983e94288d50288e46b68380
|
[
"MIT"
] | 4
|
2019-07-28T21:18:46.000Z
|
2021-02-25T06:37:25.000Z
|
from otp.level import EntityCreatorAI
from direct.showbase.PythonUtil import Functor
import DistributedBeanBarrelAI, DistributedButtonAI, DistributedCrateAI, DistributedLiftAI, DistributedDoorEntityAI, DistributedGagBarrelAI, DistributedGridAI
from toontown.suit import DistributedGridGoonAI
from toontown.suit import DistributedGoonAI
import DistributedHealBarrelAI, DistributedStomperPairAI, DistributedTriggerAI, DistributedStomperAI, DistributedLaserFieldAI, DistributedSecurityCameraAI, DistributedMoverAI, DistributedElevatorMarkerAI, DistributedSinkingPlatformAI, ActiveCellAI, CrusherCellAI, DirectionalCellAI, FactoryLevelMgrAI, BattleBlockerAI, DistributedGolfGreenGameAI
from toontown.coghq import DistributedMoleFieldAI
from toontown.coghq import DistributedMazeAI
class FactoryEntityCreatorAI(EntityCreatorAI.EntityCreatorAI):
def __init__(self, level):
EntityCreatorAI.EntityCreatorAI.__init__(self, level)
cDE = EntityCreatorAI.createDistributedEntity
cLE = EntityCreatorAI.createLocalEntity
nothing = EntityCreatorAI.nothing
self.privRegisterTypes({'activeCell': Functor(cDE, ActiveCellAI.ActiveCellAI), 'crusherCell': Functor(cDE, CrusherCellAI.CrusherCellAI),
'battleBlocker': Functor(cDE, BattleBlockerAI.BattleBlockerAI),
'beanBarrel': Functor(cDE, DistributedBeanBarrelAI.DistributedBeanBarrelAI),
'button': DistributedButtonAI.DistributedButtonAI,
'conveyorBelt': nothing,
'crate': Functor(cDE, DistributedCrateAI.DistributedCrateAI),
'directionalCell': Functor(cDE, DirectionalCellAI.DirectionalCellAI),
'door': DistributedDoorEntityAI.DistributedDoorEntityAI,
'gagBarrel': Functor(cDE, DistributedGagBarrelAI.DistributedGagBarrelAI),
'gear': nothing,
'goon': Functor(cDE, DistributedGoonAI.DistributedGoonAI),
'gridGoon': Functor(cDE, DistributedGridGoonAI.DistributedGridGoonAI),
'golfGreenGame': Functor(cDE, DistributedGolfGreenGameAI.DistributedGolfGreenGameAI),
'goonClipPlane': nothing,
'grid': Functor(cDE, DistributedGridAI.DistributedGridAI),
'healBarrel': Functor(cDE, DistributedHealBarrelAI.DistributedHealBarrelAI),
'levelMgr': Functor(cLE, FactoryLevelMgrAI.FactoryLevelMgrAI),
'lift': Functor(cDE, DistributedLiftAI.DistributedLiftAI),
'mintProduct': nothing,
'mintProductPallet': nothing,
'mintShelf': nothing,
'mover': Functor(cDE, DistributedMoverAI.DistributedMoverAI),
'paintMixer': nothing,
'pathMaster': nothing,
'rendering': nothing,
'platform': nothing,
'sinkingPlatform': Functor(cDE, DistributedSinkingPlatformAI.DistributedSinkingPlatformAI),
'stomper': Functor(cDE, DistributedStomperAI.DistributedStomperAI),
'stomperPair': Functor(cDE, DistributedStomperPairAI.DistributedStomperPairAI),
'laserField': Functor(cDE, DistributedLaserFieldAI.DistributedLaserFieldAI),
'securityCamera': Functor(cDE, DistributedSecurityCameraAI.DistributedSecurityCameraAI),
'elevatorMarker': Functor(cDE, DistributedElevatorMarkerAI.DistributedElevatorMarkerAI),
'trigger': DistributedTriggerAI.DistributedTriggerAI,
'moleField': Functor(cDE, DistributedMoleFieldAI.DistributedMoleFieldAI),
'maze': Functor(cDE, DistributedMazeAI.DistributedMazeAI)})
| 69.372549
| 345
| 0.745336
|
830606e81df0f46cb03050ec083842683c611660
| 5,238
|
py
|
Python
|
tests/layers/test_gated_average_layer.py
|
vishalbelsare/neupy
|
684313cdaddcad326f2169384fb15ec3aa29d991
|
[
"MIT"
] | null | null | null |
tests/layers/test_gated_average_layer.py
|
vishalbelsare/neupy
|
684313cdaddcad326f2169384fb15ec3aa29d991
|
[
"MIT"
] | null | null | null |
tests/layers/test_gated_average_layer.py
|
vishalbelsare/neupy
|
684313cdaddcad326f2169384fb15ec3aa29d991
|
[
"MIT"
] | null | null | null |
import numpy as np
from neupy import layers
from neupy.utils import asfloat
from neupy.exceptions import LayerConnectionError
from base import BaseTestCase
class GatedAverageTestCase(BaseTestCase):
def test_gated_average_layer_negative_index(self):
gated_avg_layer = layers.GatedAverage(gating_layer_index=-1)
layers.join([
layers.Input(20) > layers.Relu(8),
layers.Input(20) > layers.Relu(8),
layers.Input(10) > layers.Softmax(2),
], gated_avg_layer)
self.assertEqual(gated_avg_layer.output_shape, (8,))
self.assertEqual(gated_avg_layer.input_shape, [(8,), (8,), (2,)])
gated_avg_layer = layers.GatedAverage(gating_layer_index=-3)
layers.join([
layers.Input(10) > layers.Softmax(2),
layers.Input(20) > layers.Relu(8),
layers.Input(20) > layers.Relu(8),
], gated_avg_layer)
self.assertEqual(gated_avg_layer.output_shape, (8,))
self.assertEqual(gated_avg_layer.input_shape, [(2,), (8,), (8,)])
def test_gated_average_layer_exceptions_index_position(self):
gated_avg_layer = layers.GatedAverage(gating_layer_index=3)
with self.assertRaisesRegexp(LayerConnectionError, "Invalid index"):
layers.join([
layers.Input(20) > layers.Relu(8),
layers.Input(10) > layers.Softmax(2),
layers.Input(20) > layers.Relu(8),
], gated_avg_layer)
gated_avg_layer = layers.GatedAverage(gating_layer_index=-4)
with self.assertRaisesRegexp(LayerConnectionError, "Invalid index"):
layers.join([
layers.Input(10) > layers.Softmax(2),
layers.Input(20) > layers.Relu(8),
layers.Input(20) > layers.Relu(8),
], gated_avg_layer)
def test_gated_average_layer_exceptions(self):
gated_avg_layer = layers.GatedAverage()
with self.assertRaisesRegexp(LayerConnectionError, "should be vector"):
layers.join([
layers.Input((10, 3, 3)), # shape not 1d
layers.Input(20) > layers.Relu(8),
layers.Input(20) > layers.Relu(8),
], gated_avg_layer)
gated_avg_layer = layers.GatedAverage()
error_message = "only 3 networks, got 2 networks"
with self.assertRaisesRegexp(LayerConnectionError, error_message):
layers.join([
layers.Input(10) > layers.Softmax(3),
layers.Input(20) > layers.Relu(8),
layers.Input(20) > layers.Relu(8),
], gated_avg_layer)
gated_avg_layer = layers.GatedAverage()
error_message = "expect to have the same shapes"
with self.assertRaisesRegexp(LayerConnectionError, error_message):
layers.join([
layers.Input(10) > layers.Softmax(2),
layers.Input(20) > layers.Relu(8),
layers.Input(20) > layers.Relu(10),
], gated_avg_layer)
def test_gated_average_layer_non_default_index(self):
gated_avg_layer = layers.GatedAverage(gating_layer_index=1)
layers.join([
layers.Input(20) > layers.Relu(8),
layers.Input(10) > layers.Softmax(2),
layers.Input(20) > layers.Relu(8),
], gated_avg_layer)
self.assertEqual(gated_avg_layer.output_shape, (8,))
self.assertEqual(gated_avg_layer.input_shape, [(8,), (2,), (8,)])
def test_gated_average_layer_output_shape(self):
gated_avg_layer = layers.GatedAverage()
self.assertIsNone(gated_avg_layer.output_shape)
layers.join([
layers.Input(10) > layers.Softmax(2),
layers.Input(20) > layers.Relu(8),
layers.Input(20) > layers.Relu(8),
], gated_avg_layer)
self.assertEqual(gated_avg_layer.output_shape, (8,))
self.assertEqual(gated_avg_layer.input_shape, [(2,), (8,), (8,)])
def test_gated_average_layer_output(self):
input_layer = layers.Input(10)
network = layers.join(
[
input_layer > layers.Softmax(2),
input_layer > layers.Relu(8),
input_layer > layers.Relu(8),
],
layers.GatedAverage()
)
predict = network.compile()
random_input = asfloat(np.random.random((20, 10)))
actual_output = predict(random_input)
self.assertEqual(actual_output.shape, (20, 8))
def test_gated_average_layer_multi_dimensional_inputs(self):
input_layer = layers.Input((1, 5, 5))
network = layers.join(
[
input_layer > layers.Reshape() > layers.Softmax(2),
input_layer > layers.Convolution((3, 2, 2)),
input_layer > layers.Convolution((3, 2, 2)),
],
layers.GatedAverage()
)
self.assertEqual(network.input_shape, (1, 5, 5))
self.assertEqual(network.output_shape, (3, 4, 4))
predict = network.compile()
random_input = asfloat(np.random.random((8, 1, 5, 5)))
actual_output = predict(random_input)
self.assertEqual(actual_output.shape, (8, 3, 4, 4))
| 38.514706
| 79
| 0.605002
|
aa7e82b5c9c4b12c8050fe564d702b4f9229fe56
| 1,610
|
py
|
Python
|
plotboi.py
|
RustyBamboo/hash-shader
|
2604a7885f9db97518d832a997d387e78f873fa2
|
[
"MIT"
] | 41
|
2021-04-25T23:22:02.000Z
|
2022-03-20T16:46:53.000Z
|
plotboi.py
|
RustyBamboo/hash-shader
|
2604a7885f9db97518d832a997d387e78f873fa2
|
[
"MIT"
] | null | null | null |
plotboi.py
|
RustyBamboo/hash-shader
|
2604a7885f9db97518d832a997d387e78f873fa2
|
[
"MIT"
] | 3
|
2021-06-28T17:07:44.000Z
|
2022-03-10T04:25:51.000Z
|
#!/usr/bin/env python2
import subprocess
import sys
import os
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import numpy as np
import cpuinfo
class BlockChainValidator:
def __init__(self, cmd, name, color):
self.data = []
self.cmd = os.path.join(os.path.dirname(__file__), cmd)
self.name = name
self.color = color
def run(self, filename, n):
data_point = subprocess.check_output([self.cmd, filename, str(n)]).decode("utf-8")
[b, t] = str(data_point).split()
self.data.append([int(b), float(t)])
def plot(self):
plt.plot(np.array(self.data)[:,0], np.array(self.data)[:,1], c=self.color, label=self.name, linewidth=7.0)
gpu_name = subprocess.check_output(['nvidia-smi', '-L'])
gpu_name = gpu_name[:gpu_name.find('UUID')-1]
validators = [BlockChainValidator('c/cuda_bitcoin', 'Cuda: ' + gpu_name, 'red'),
BlockChainValidator('c/cpu_bitcoin', 'CPU: ' + cpuinfo.get_cpu_info()['brand_raw'] , 'blue'),
BlockChainValidator('vulkan/target/release/blockchain-val', 'rust gpu / Vulkan: ' + gpu_name, 'green')]
# cuda crashes on some numbers for some reason
for blocks in [10, 100, 999, 14999, -1]:
print blocks
for comp in validators:
comp.run(sys.argv[1], blocks)
plt.figure(figsize=(20,20))
plt.rcParams.update({'font.size': 22})
for comp in validators:
comp.plot()
plt.ylabel('computation time (ms)');
plt.xlabel('number of blocks verified');
plt.legend(loc='upper left')
plt.savefig(os.path.join(os.path.dirname(__file__), 'docs/figs/performance_plot.png'))
| 34.255319
| 117
| 0.668944
|
e63ba48516c1aa461a410b7890320226c46bd08f
| 104
|
py
|
Python
|
cooler/_version.py
|
mirnylab/cooler
|
987e87dd357c99a0a070b8a25cb7aaef86f1655c
|
[
"BSD-3-Clause"
] | 106
|
2016-01-15T21:24:41.000Z
|
2020-09-24T12:15:13.000Z
|
cooler/_version.py
|
mirnylab/cooler
|
987e87dd357c99a0a070b8a25cb7aaef86f1655c
|
[
"BSD-3-Clause"
] | 190
|
2016-02-16T03:35:30.000Z
|
2020-09-25T19:16:28.000Z
|
cooler/_version.py
|
mirnylab/cooler
|
987e87dd357c99a0a070b8a25cb7aaef86f1655c
|
[
"BSD-3-Clause"
] | 43
|
2016-08-26T18:51:21.000Z
|
2020-09-08T13:38:50.000Z
|
__version__ = "0.8.11"
__format_version__ = 3
__format_version_mcool__ = 2
__format_version_scool__ = 1
| 20.8
| 28
| 0.807692
|
97a796b0c4d88779d1248191f979cdf202b7d2da
| 5,165
|
py
|
Python
|
local/cpp_algorithms/common_helpers.py
|
andresperez86/vision_cpp
|
505d524f598e1b31f4d80f909cf9d8fd21f5dbe5
|
[
"MIT"
] | 1
|
2021-09-16T06:13:09.000Z
|
2021-09-16T06:13:09.000Z
|
local/cpp_algorithms/common_helpers.py
|
andresperez86/vision_cpp
|
505d524f598e1b31f4d80f909cf9d8fd21f5dbe5
|
[
"MIT"
] | null | null | null |
local/cpp_algorithms/common_helpers.py
|
andresperez86/vision_cpp
|
505d524f598e1b31f4d80f909cf9d8fd21f5dbe5
|
[
"MIT"
] | null | null | null |
"""
Some helper functions that are often used.
"""
import numpy as np
import matplotlib.pyplot as plt
from .constants import OB, NO
from pathlib import Path
from PIL import Image
RES = [(32, 32), (50, 50), (50, 144), (144, 255), (256, 256)]
def adjacency_test(path, exactly_one=True):
"""
Checks all points in a path for L1 adjacency.
"""
prev_point = path[0]
for i, point in enumerate(path[1:]):
x, y = prev_point
x_, y_ = point
dist = np.abs(x - x_) + np.abs(y - y_)
prev_point = point
if exactly_one and dist == 1:
continue
elif dist <= 1:
continue
else:
return i - 1
return True
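# Illustrative usage (comment added for clarity, not in the original source):
#   adjacency_test([(0, 0), (0, 1), (1, 1)]) -> True (each step is L1 distance 1)
#   adjacency_test([(0, 0), (1, 1)]) -> -1 (a diagonal step has L1 distance 2)
# On failure the function returns i - 1, the offset into path[1:] of the step
# before the offending point, rather than a boolean.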
def generate_no_obs_area_map(resolutions=RES):
"""
resolutions : list of tuples [(rows, cols)]
"""
area_maps = []
for res in resolutions:
area_maps.append(np.zeros(res))
return area_maps
def generate_point_obstacles(area_map, p=0.5):
"""
Adds point obstacles to the area_map with the given
probability `p`, if `p==1` then the entire map will
be covered.
"""
area_map = area_map.copy()
area_map[np.random.rand(*area_map.shape) < p] = -1
return area_map
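# Example (illustrative): generate_point_obstacles(np.zeros((5, 5)), p=0.2)
# returns a copy of the map in which roughly 20% of the cells are set to -1.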
def get_area_map(path, area=0, obs=-1):
"""
path : path to area map png, png should have only
0 255 as values.
returns area_map with cell values
obstacle : OBS
non obstacle : NOB
"""
am = np.array(Image.open(path))
ma = np.array(am).mean(axis=2) == 255
am = np.int8(np.zeros(ma.shape))
am[ma] = area
am[~ma] = obs
return am
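# Note (added for clarity): pixels whose RGB mean is exactly 255 (pure white)
# are marked as free area; every other pixel is marked as an obstacle.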
def imshow_scatter(path, color="orange", alpha=1, s=20):
"""
Prints the points in the path
"""
x, y = np.array(path).T
plt.scatter(y, x, color=color, alpha=alpha, s=s)
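# Note (added for clarity): the path stores (row, col) pairs while matplotlib
# expects (x, y), hence the swapped scatter(y, x) call above.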
def imshow(area_map, r=1, c=1, i=1, figsize=(5, 5), cmap="viridis"):
"""
Display with no interpolation.
"""
if r < 2 and c < 2 or i == 1:
plt.figure(figsize=figsize)
plt.subplot(r, c, i)
ax = plt.imshow(area_map, interpolation='none', cmap=cmap)
plt.axis('on');
return ax
def plot(cp, alpha=0.8, color="lightblue"):
"""
Plot coverage path as a line.
"""
cp = np.array(cp)
x, y = cp.T
plt.plot(y, x, alpha=alpha, color=color)
def get_random_coords(area_map, n=2, obs=-1):
"""
    Return the right-most obstacle-free coord in each row
    of the map (the random sampling is commented out below).
    n : currently unused
    obs : obstacle value on the area map
"""
# r = lambda x: np.random.randint(0, x)
b1, b2 = area_map.shape
coords = []
for i in range(b1):
for j in range(b2-1, 0, -1):
p = (i, j)
if area_map[p] != obs:
coords.append(p)
break
return coords
def get_end_coords(area_map, n=2, obs=-1):
"""
    Return the bottom-most obstacle-free coord in each column
    of the map, scanning columns from right to left.
    n : currently unused
    obs : obstacle value on the area map
"""
# r = lambda x: np.random.randint(0, x)
b1, b2 = area_map.shape
point = []
for i in range(b2-1, 0, -1):
for j in range(b1-1, 0, -1):
p = (j, i)
if area_map[p] != obs:
point.append(p)
break
return point
def set_val(area_map, coords, val):
"""
Set `val` at given `coords` on
the `area_map`
area_map : 2D numpy array
coords : list of (x,y) tuples
val : int of value to set
"""
x, y = np.array(coords).T
area_map[x, y] = val
def is_bounded(coord, shape):
"""
Checks if a coord (x,y) is within bounds.
"""
x, y = coord
g, h = shape
lesser = x < 0 or y < 0
greater = x >= g or y >= h
if lesser or greater:
return False
return True
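# Example (illustrative): is_bounded((2, 3), (5, 5)) -> True, while
# is_bounded((5, 0), (5, 5)) -> False since valid indices run 0..shape-1.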
def is_valid(coord, area_map, obstacle=-1):
"""
    Check if a coord (x, y) is bounded and not
    on an obstacle.
"""
coord = tuple(coord)
is_b = is_bounded(coord, area_map.shape)
if is_b:
is_on_obs = False
if isinstance(obstacle, list):
for obs in obstacle:
is_on_obs |= area_map[coord] == obs
else:
is_on_obs = area_map[coord] == obstacle
if not is_on_obs:
return True
return False
def get_all_area_maps(folder_path):
"""
Returns size sorted list of area maps.
    folder_path : path to the folder containing the maps (.png)
"""
ams = []
for path in Path(folder_path).iterdir():
try:
ams.append(get_area_map(path))
except:
continue
am_idx = np.array([am.size for am in ams]).argsort()
return list(np.array(ams)[am_idx])
def get_drone_map(A, i, obstacle=OB, coverage=NO):
"""
Returns area map for a single drone
from the assignment matrix.
PARAMETERS
---
A : assignment matrix obtained from darp.
i : the drone number (cell value of A).
obstacle : value to assign the obstacle
coverage : value to assign the coverage area
"""
am = A.copy()
x, y = np.where(am != i)
am[x, y] = obstacle
x, y = np.where(am == i)
am[x, y] = coverage
return am
| 23.692661
| 68
| 0.571926
|
6bdcc4ec6a85384ec927ff120aa0b58a62dd6264
| 889
|
py
|
Python
|
conda_lock/src_parser/selectors.py
|
kev-zheng/conda-lock
|
2717932c6a0d1a5fab4546fafe2d2f85e378f3ba
|
[
"MIT"
] | 30
|
2019-09-09T17:09:25.000Z
|
2022-03-02T18:47:49.000Z
|
conda_lock/src_parser/selectors.py
|
kev-zheng/conda-lock
|
2717932c6a0d1a5fab4546fafe2d2f85e378f3ba
|
[
"MIT"
] | 25
|
2019-09-13T18:43:37.000Z
|
2020-08-15T16:50:01.000Z
|
conda_lock/src_parser/selectors.py
|
kev-zheng/conda-lock
|
2717932c6a0d1a5fab4546fafe2d2f85e378f3ba
|
[
"MIT"
] | 9
|
2019-09-13T15:37:02.000Z
|
2021-09-04T02:13:24.000Z
|
import re
from typing import Iterator
def filter_platform_selectors(content: str, platform) -> Iterator[str]:
""""""
# we support a very limited set of selectors that adhere to platform only
platform_sel = {
"linux-64": {"linux64", "unix", "linux"},
"linux-aarch64": {"aarch64", "unix", "linux"},
"linux-ppc64le": {"ppc64le", "unix", "linux"},
"osx-64": {"osx", "osx64", "unix"},
"win-64": {"win", "win64"},
}
# This code is adapted from conda-build
sel_pat = re.compile(r"(.+?)\s*(#.*)?\[([^\[\]]+)\](?(2)[^\(\)]*)$")
for line in content.splitlines(keepends=False):
if line.lstrip().startswith("#"):
continue
m = sel_pat.match(line)
if m:
cond = m.group(3)
if cond in platform_sel[platform]:
yield line
else:
yield line
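# Illustrative example (not part of the original source): given
#   content = "numpy\npywin32  # [win]\ndataclasses  # [linux]"
# filter_platform_selectors(content, "linux-64") yields "numpy" and
# "dataclasses  # [linux]", because "linux" is in the linux-64 selector set
# while "win" is not.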
| 30.655172
| 77
| 0.527559
|
1dfd26485eea7253d05518bbe4fa394757c4acbb
| 14,632
|
py
|
Python
|
tests/sentry/lang/native/test_plugin.py
|
noscripter/sentry
|
1c5b1b53e740ffd2747afb7f0995e026be9468d0
|
[
"BSD-3-Clause"
] | 1
|
2021-01-13T15:40:03.000Z
|
2021-01-13T15:40:03.000Z
|
tests/sentry/lang/native/test_plugin.py
|
fotinakis/sentry
|
c5cfa5c5e47475bf5ef41e702548c2dfc7bb8a7c
|
[
"BSD-3-Clause"
] | null | null | null |
tests/sentry/lang/native/test_plugin.py
|
fotinakis/sentry
|
c5cfa5c5e47475bf5ef41e702548c2dfc7bb8a7c
|
[
"BSD-3-Clause"
] | 1
|
2019-11-01T14:48:32.000Z
|
2019-11-01T14:48:32.000Z
|
from __future__ import absolute_import
from mock import patch
from sentry.models import Event
from sentry.testutils import requires_llvm_symbolizer, TestCase
from sentry.lang.native.symbolizer import Symbolizer
@requires_llvm_symbolizer
class BasicResolvingIntegrationTest(TestCase):
@patch('sentry.lang.native.symbolizer.Symbolizer.symbolize_app_frame')
def test_frame_resolution(self, symbolize_frame):
object_name = (
"/var/containers/Bundle/Application/"
"B33C37A8-F933-4B6B-9FFA-152282BFDF13/"
"SentryTest.app/SentryTest"
)
symbolize_frame.return_value = {
'filename': 'Foo.swift',
'line': 42,
'column': 23,
'object_name': object_name,
'symbol_name': 'real_main',
'symbol_addr': '0x1000262a0',
"instruction_addr": '0x100026330',
}
event_data = {
"sentry.interfaces.User": {
"ip_address": "31.172.207.97"
},
"extra": {},
"project": self.project.id,
"platform": "cocoa",
"debug_meta": {
"images": [
{
"type": "apple",
"cpu_subtype": 0,
"uuid": "C05B4DDD-69A7-3840-A649-32180D341587",
"image_vmaddr": 4294967296,
"image_addr": 4295098368,
"cpu_type": 16777228,
"image_size": 32768,
"name": object_name,
}
],
"sdk_info": {
"dsym_type": "macho",
"sdk_name": "iOS",
"version_major": 9,
"version_minor": 3,
"version_patchlevel": 0
}
},
"sentry.interfaces.Exception": {
"values": [
{
"stacktrace": {
"frames": [
{
"function": "<redacted>",
"abs_path": None,
"instruction_offset": 4,
"package": "/usr/lib/system/libdyld.dylib",
"filename": None,
"symbol_addr": "0x002ac28b4",
"lineno": None,
"in_app": False,
"instruction_addr": "0x002ac28b8"
},
{
"function": "main",
"instruction_addr": 4295123760,
"symbol_addr": 4295123616,
"image_addr": 4295098368
},
{
"platform": "javascript",
"function": "merge",
"abs_path": "/scripts/views.js",
"vars": {},
"module": None,
"filename": "../../sentry/scripts/views.js",
"colno": 16,
"in_app": True,
"lineno": 268
}
]
},
"type": "NSRangeException",
"mechanism": {
"posix_signal": {
"signal": 6,
"code": 0,
"name": "SIGABRT",
"code_name": None
},
"type": "cocoa",
"mach_exception": {
"subcode": 0,
"code": 0,
"exception": 10,
"exception_name": "EXC_CRASH"
}
},
"value": (
"*** -[__NSArray0 objectAtIndex:]: index 3 "
"beyond bounds for empty NSArray"
)
}
]
},
"contexts": {
"device": {
"model_id": "N102AP",
"model": "iPod7,1",
"arch": "arm64",
"family": "iPod"
},
"os": {
"version": "9.3.2",
"rooted": False,
"build": "13F69",
"name": "iOS"
}
},
"threads": {
"values": [
{
"id": 39,
"stacktrace": {
"frames": [
{
"in_app": False,
"platform": "apple",
"package": "\/usr\/lib\/system\/libsystem_pthread.dylib",
"symbol_addr": "0x00000001843a102c",
"image_addr": "0x00000001843a0000",
"instruction_addr": "0x00000001843a1530"
},
{
"in_app": False,
"platform": "apple",
"package": "\/usr\/lib\/system\/libsystem_kernel.dylib",
"symbol_addr": "0x00000001842d8b40",
"image_addr": "0x00000001842bc000",
"instruction_addr": "0x00000001842d8b48"
}
]
},
"crashed": False,
"current": False
}
]
}
}
resp = self._postWithHeader(event_data)
assert resp.status_code == 200
event = Event.objects.get()
bt = event.interfaces['sentry.interfaces.Exception'].values[0].stacktrace
frames = bt.frames
assert frames[0].function == '<redacted>'
assert frames[0].instruction_addr == '0x002ac28b8'
assert not frames[0].in_app
assert frames[1].function == 'real_main'
assert frames[1].filename == 'Foo.swift'
assert frames[1].lineno == 42
assert frames[1].colno == 23
assert frames[1].package == object_name
assert frames[1].instruction_addr == '0x100026330'
assert frames[1].instruction_offset is None
assert frames[1].in_app
assert frames[2].platform == 'javascript'
assert frames[2].abs_path == '/scripts/views.js'
assert frames[2].function == 'merge'
assert frames[2].lineno == 268
assert frames[2].colno == 16
assert frames[2].filename == '../../sentry/scripts/views.js'
assert frames[2].instruction_offset is None
assert frames[2].in_app
assert len(event.interfaces['threads'].values) == 1
def sym_app_frame(self, frame):
object_name = (
"/var/containers/Bundle/Application/"
"B33C37A8-F933-4B6B-9FFA-152282BFDF13/"
"SentryTest.app/SentryTest"
)
if frame['instruction_addr'] == '0x1':
return {
'filename': 'Foo.swift',
'line': 82,
'column': 23,
'object_name': object_name,
'symbol_name': 'other_main',
'symbol_addr': '0x1',
"instruction_addr": '0x1',
}
return {
'filename': 'Foo.swift',
'line': 42,
'column': 23,
'object_name': object_name,
'symbol_name': 'real_main',
'symbol_addr': '0x1000262a0',
"instruction_addr": '0x100026330',
}
@patch.object(Symbolizer, 'symbolize_app_frame', sym_app_frame)
def test_frame_resolution_no_sdk_info(self):
object_name = (
"/var/containers/Bundle/Application/"
"B33C37A8-F933-4B6B-9FFA-152282BFDF13/"
"SentryTest.app/SentryTest"
)
event_data = {
"sentry.interfaces.User": {
"ip_address": "31.172.207.97"
},
"extra": {},
"project": self.project.id,
"platform": "cocoa",
"debug_meta": {
"images": [
{
"type": "apple",
"cpu_subtype": 0,
"uuid": "C05B4DDD-69A7-3840-A649-32180D341587",
"image_vmaddr": 4294967296,
"image_addr": 4295098368,
"cpu_type": 16777228,
"image_size": 32768,
"name": object_name,
}
]
},
"contexts": {
"os": {
"name": "iOS",
"version": "9.3.0"
}
},
"sentry.interfaces.Exception": {
"values": [
{
"stacktrace": {
"frames": [
{
"function": "<redacted>",
"abs_path": None,
"instruction_offset": 4,
"package": "/usr/lib/system/libdyld.dylib",
"filename": None,
"symbol_addr": "0x002ac28b4",
"lineno": None,
"in_app": False,
"instruction_addr": "0x002ac28b8"
},
{
"function": "main",
"instruction_addr": 4295123760,
"symbol_addr": 4295123616,
"image_addr": 4295098368
},
{
"function": "other_main",
"instruction_addr": 1,
"symbol_addr": 1,
"image_addr": 4295098368
},
{
"platform": "javascript",
"function": "merge",
"abs_path": "/scripts/views.js",
"vars": {},
"module": None,
"filename": "../../sentry/scripts/views.js",
"colno": 16,
"in_app": True,
"lineno": 268
}
]
},
"type": "NSRangeException",
"mechanism": {
"posix_signal": {
"signal": 6,
"code": 0,
"name": "SIGABRT",
"code_name": None
},
"type": "cocoa",
"mach_exception": {
"subcode": 0,
"code": 0,
"exception": 10,
"exception_name": "EXC_CRASH"
}
},
"value": (
"*** -[__NSArray0 objectAtIndex:]: index 3 "
"beyond bounds for empty NSArray"
)
}
]
},
"contexts": {
"device": {
"model_id": "N102AP",
"model": "iPod7,1",
"arch": "arm64",
"family": "iPod"
},
"os": {
"version": "9.3.2",
"rooted": False,
"build": "13F69",
"name": "iOS"
}
}
}
resp = self._postWithHeader(event_data)
assert resp.status_code == 200
event = Event.objects.get()
bt = event.interfaces['sentry.interfaces.Exception'].values[0].stacktrace
frames = bt.frames
assert frames[0].function == '<redacted>'
assert frames[0].instruction_addr == '0x002ac28b8'
assert not frames[0].in_app
assert frames[1].function == 'real_main'
assert frames[1].filename == 'Foo.swift'
assert frames[1].lineno == 42
assert frames[1].colno == 23
assert frames[1].package == object_name
assert frames[1].instruction_addr == '0x100026330'
assert frames[1].instruction_offset is None
assert frames[1].in_app
assert frames[2].function == 'other_main'
assert frames[2].filename == 'Foo.swift'
assert frames[2].lineno == 82
assert frames[2].colno == 23
assert frames[2].package == object_name
assert frames[2].instruction_addr == '0x000000001'
assert frames[2].instruction_offset is None
assert frames[2].in_app
assert frames[3].platform == 'javascript'
assert frames[3].abs_path == '/scripts/views.js'
assert frames[3].function == 'merge'
assert frames[3].lineno == 268
assert frames[3].colno == 16
assert frames[3].filename == '../../sentry/scripts/views.js'
assert frames[3].instruction_offset is None
assert frames[3].in_app
| 39.227882
| 93
| 0.362356
|
f7e5944768414b173c54865a6ddf2d931753fbc6
| 2,228
|
py
|
Python
|
tests/contrib/django/test_django_patch.py
|
p7g/dd-trace-py
|
141ac0ab6e9962e3b3bafc9de172076075289a19
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 308
|
2016-12-07T16:49:27.000Z
|
2022-03-15T10:06:45.000Z
|
tests/contrib/django/test_django_patch.py
|
p7g/dd-trace-py
|
141ac0ab6e9962e3b3bafc9de172076075289a19
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 1,928
|
2016-11-28T17:13:18.000Z
|
2022-03-31T21:43:19.000Z
|
tests/contrib/django/test_django_patch.py
|
p7g/dd-trace-py
|
141ac0ab6e9962e3b3bafc9de172076075289a19
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 311
|
2016-11-27T03:01:49.000Z
|
2022-03-18T21:34:03.000Z
|
from ddtrace.contrib.django import patch
from tests.contrib.patch import PatchTestCase
class TestDjangoPatch(PatchTestCase.Base):
__integration_name__ = "django"
__module_name__ = "django"
__patch_func__ = patch
__unpatch_func__ = None
def assert_module_patched(self, django):
self.assert_wrapped(django.apps.registry.Apps.populate)
self.assert_wrapped(django.core.handlers.base.BaseHandler.load_middleware)
self.assert_wrapped(django.core.handlers.base.BaseHandler.get_response)
self.assert_wrapped(django.template.base.Template.render)
if django.VERSION >= (2, 0, 0):
self.assert_wrapped(django.urls.path)
self.assert_wrapped(django.urls.re_path)
self.assert_wrapped(django.views.generic.base.View.as_view)
self.assert_wrapped(django.db.connections.__setitem__)
def assert_not_module_patched(self, django):
self.assert_not_wrapped(django.apps.registry.Apps.populate)
self.assert_not_wrapped(django.core.handlers.base.BaseHandler.load_middleware)
self.assert_not_wrapped(django.core.handlers.base.BaseHandler.get_response)
self.assert_not_wrapped(django.template.base.Template.render)
if django.VERSION >= (2, 0, 0):
self.assert_not_wrapped(django.urls.path)
self.assert_not_wrapped(django.urls.re_path)
self.assert_not_wrapped(django.views.generic.base.View.as_view)
self.assert_not_wrapped(django.db.connections.__setitem__)
def assert_not_module_double_patched(self, django):
self.assert_not_double_wrapped(django.apps.registry.Apps.populate)
self.assert_not_double_wrapped(django.core.handlers.base.BaseHandler.load_middleware)
self.assert_not_double_wrapped(django.core.handlers.base.BaseHandler.get_response)
self.assert_not_double_wrapped(django.template.base.Template.render)
if django.VERSION >= (2, 0, 0):
self.assert_not_double_wrapped(django.urls.path)
self.assert_not_double_wrapped(django.urls.re_path)
self.assert_not_double_wrapped(django.views.generic.base.View.as_view)
self.assert_not_double_wrapped(django.db.connections.__setitem__)
| 51.813953
| 93
| 0.748654
|
e48d22c11a99afabbbf8f8376da09f289e92b69e
| 1,760
|
py
|
Python
|
picture/migrations/0001_initial.py
|
Michellemukami/django-image
|
754a4b0ce32415383f99f1079335b721b56f14f3
|
[
"MIT"
] | 1
|
2019-09-20T09:04:27.000Z
|
2019-09-20T09:04:27.000Z
|
picture/migrations/0001_initial.py
|
Michellemukami/django-image
|
754a4b0ce32415383f99f1079335b721b56f14f3
|
[
"MIT"
] | 4
|
2020-06-05T22:35:14.000Z
|
2021-06-10T21:52:40.000Z
|
picture/migrations/0001_initial.py
|
Michellemukami/django-image
|
754a4b0ce32415383f99f1079335b721b56f14f3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-08-22 09:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30)),
],
),
migrations.CreateModel(
name='Image',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=60)),
('post', models.TextField()),
('pub_date', models.DateTimeField(auto_now_add=True)),
('Pixels_image', models.ImageField(blank=True, upload_to='pixels/')),
],
),
migrations.CreateModel(
name='Location',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30)),
],
),
migrations.AddField(
model_name='image',
name='Location',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='picture.Location'),
),
migrations.AddField(
model_name='image',
name='category',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='picture.Category'),
),
]
| 33.846154
| 114
| 0.568182
|
91b9ee7f07e53019ee0d1593c8ee8751228ee83e
| 17,384
|
py
|
Python
|
localstack/services/s3/s3_utils.py
|
matt-mercer/localstack
|
b69ba25e495c6ef889d33a050b216d0cd1035041
|
[
"Apache-2.0"
] | null | null | null |
localstack/services/s3/s3_utils.py
|
matt-mercer/localstack
|
b69ba25e495c6ef889d33a050b216d0cd1035041
|
[
"Apache-2.0"
] | null | null | null |
localstack/services/s3/s3_utils.py
|
matt-mercer/localstack
|
b69ba25e495c6ef889d33a050b216d0cd1035041
|
[
"Apache-2.0"
] | null | null | null |
import datetime
import logging
import re
import time
from collections import namedtuple
from typing import Dict
from urllib import parse as urlparse
from urllib.parse import parse_qs, urlencode
from botocore.awsrequest import create_request_object
from botocore.compat import urlsplit
from botocore.credentials import Credentials
from localstack import config
from localstack.constants import (
S3_STATIC_WEBSITE_HOSTNAME,
S3_VIRTUAL_HOSTNAME,
TEST_AWS_ACCESS_KEY_ID,
TEST_AWS_SECRET_ACCESS_KEY,
)
from localstack.utils.auth import HmacV1QueryAuth, S3SigV4QueryAuth
from localstack.utils.aws.aws_responses import requests_error_response_xml_signature_calculation
LOGGER = logging.getLogger(__name__)
REGION_REGEX = r"[a-z]{2}-[a-z]+-[0-9]{1,}"
PORT_REGEX = r"(:[\d]{0,6})?"
S3_STATIC_WEBSITE_HOST_REGEX = r"^([^.]+)\.s3-website\.localhost\.localstack\.cloud(:[\d]{0,6})?$"
S3_VIRTUAL_HOSTNAME_REGEX = (  # path-based refs have at least a valid bucket expression (separated by ".") followed by .s3
r"^(http(s)?://)?((?!s3\.)[^\./]+)\." # the negative lookahead part is for considering buckets
r"(((s3(-website)?\.({}\.)?)localhost(\.localstack\.cloud)?)|(localhost\.localstack\.cloud)|"
r"(s3((-website)|(-external-1))?[\.-](dualstack\.)?"
r"({}\.)?amazonaws\.com(.cn)?)){}(/[\w\-. ]*)*$"
).format(
REGION_REGEX, REGION_REGEX, PORT_REGEX
)
BUCKET_NAME_REGEX = (
r"(?=^.{3,63}$)(?!^(\d+\.)+\d+$)"
+ r"(^(([a-z0-9]|[a-z0-9][a-z0-9\-]*[a-z0-9])\.)*([a-z0-9]|[a-z0-9][a-z0-9\-]*[a-z0-9])$)"
)
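# Illustrative matches (comment added for clarity): "my-bucket" and
# "logs.example.com" satisfy BUCKET_NAME_REGEX, whereas "-bad-start",
# "UPPERCASE" and IP-like names such as "192.168.0.1" do not.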
HOST_COMBINATION_REGEX = r"^(.*)(:[\d]{0,6})"
PORT_REPLACEMENT = [":80", ":443", ":%s" % config.EDGE_PORT, ""]
# response header overrides the client may request
ALLOWED_HEADER_OVERRIDES = {
"response-content-type": "Content-Type",
"response-content-language": "Content-Language",
"response-expires": "Expires",
"response-cache-control": "Cache-Control",
"response-content-disposition": "Content-Disposition",
"response-content-encoding": "Content-Encoding",
}
# params are required in presigned url
SIGNATURE_V2_PARAMS = ["Signature", "Expires", "AWSAccessKeyId"]
SIGNATURE_V4_PARAMS = [
"X-Amz-Algorithm",
"X-Amz-Credential",
"X-Amz-Date",
"X-Amz-Expires",
"X-Amz-SignedHeaders",
"X-Amz-Signature",
]
# headers to blacklist from request_dict.signed_headers
BLACKLISTED_HEADERS = ["X-Amz-Security-Token"]
# query params overrides for multipart upload and node sdk
ALLOWED_QUERY_PARAMS = [
"X-id",
"X-Amz-User-Agent",
"X-Amz-Content-Sha256",
"versionid",
"uploadid",
"partnumber",
]
def is_static_website(headers):
"""
Determine if the incoming request is for s3 static website hosting
returns True if the host matches website regex
returns False if the host does not matches website regex
"""
return bool(re.match(S3_STATIC_WEBSITE_HOST_REGEX, headers.get("host", "")))
def uses_host_addressing(headers: Dict[str, str]):
"""
Determines if the bucket is using host based addressing style or path based.
"""
# we can assume that the host header we are receiving here is actually the header we originally received
# from the client (because the edge service is forwarding the request in memory)
match = re.match(S3_VIRTUAL_HOSTNAME_REGEX, headers.get("host", ""))
# checks whether there is a bucket name. This is sort of hacky
return True if match and match.group(3) else False
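# Example (illustrative): a Host header of
# "my-bucket.s3.localhost.localstack.cloud:4566" is treated as virtual-host
# style addressing (True), while a plain "localhost:4566" host, as used by
# path-style requests such as "/my-bucket/key", is not (False).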
def extract_bucket_name(headers, path):
"""
Extract the bucket name
if using host based addressing it's extracted from host header
if using path based addressing it's extracted form the path
"""
bucket_name = None
if uses_host_addressing(headers):
pattern = re.compile(S3_VIRTUAL_HOSTNAME_REGEX)
match = pattern.match(headers.get("host", ""))
if match and match.group(3):
bucket_name = match.group(3)
else:
bucket_name = path.split("/", maxsplit=2)[1]
return bucket_name if bucket_name else None
def extract_key_name(headers, path):
"""
Extract the key name from the path depending on addressing_style
"""
key_name = None
path = path.split("?")[0] # strip off query params from path
if uses_host_addressing(headers):
split = path.split("/", maxsplit=1)
if len(split) > 1:
key_name = split[1]
else:
split = path.split("/", maxsplit=2)
if len(split) > 2:
key_name = split[2]
return key_name if key_name else None
def extract_bucket_and_key_name(headers, path):
return extract_bucket_name(headers, path), extract_key_name(headers, path)
def normalize_bucket_name(bucket_name):
bucket_name = bucket_name or ""
bucket_name = bucket_name.lower()
return bucket_name
def validate_bucket_name(bucket_name):
"""
Validate s3 bucket name based on the documentation
ref. https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html
"""
return True if re.match(BUCKET_NAME_REGEX, bucket_name) else False
def get_bucket_hostname(bucket_name):
"""
Get bucket name for addressing style host
"""
return "%s.%s:%s" % (bucket_name, S3_VIRTUAL_HOSTNAME, config.EDGE_PORT)
def get_bucket_website_hostname(bucket_name):
"""
Get bucket name for addressing style host for website hosting
"""
return "%s.%s:%s" % (bucket_name, S3_STATIC_WEBSITE_HOSTNAME, config.EDGE_PORT)
def get_forwarded_for_host(headers):
x_forwarded_header = re.split(r",\s?", headers.get("X-Forwarded-For", ""))
host = x_forwarded_header[-1]
return host
def is_real_s3_url(url):
return re.match(r".*s3(\-website)?\.([^\.]+\.)?amazonaws.com.*", url or "")
def get_key_from_s3_url(url: str, leading_slash: bool = False) -> str:
"""Extract the object key from an S3 URL"""
result = re.sub(r"^s3://[^/]+", "", url, flags=re.IGNORECASE).strip()
result = result.lstrip("/")
result = f"/{result}" if leading_slash else result
return result
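# Example (illustrative): get_key_from_s3_url("s3://my-bucket/some/key.txt")
# returns "some/key.txt"; with leading_slash=True it returns "/some/key.txt".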
def is_object_download_request(method, path, headers) -> bool:
"""Return whether this is a GetObject download request."""
return method == "GET" and bool(extract_key_name(headers, path))
def is_expired(expiry_datetime):
now_datetime = datetime.datetime.now(tz=expiry_datetime.tzinfo)
return now_datetime > expiry_datetime
def authenticate_presign_url(method, path, headers, data=None):
url = "{}{}".format(config.get_edge_url(), path)
parsed = urlparse.urlparse(url)
query_params = parse_qs(parsed.query)
forwarded_for = get_forwarded_for_host(headers)
if forwarded_for:
url = re.sub("://[^/]+", "://%s" % forwarded_for, url)
LOGGER.debug("Received presign S3 URL: %s", url)
sign_headers = {}
query_string = {}
is_v2 = all(p in query_params for p in SIGNATURE_V2_PARAMS)
is_v4 = all(p in query_params for p in SIGNATURE_V4_PARAMS)
    # Add overridden headers to the query string params
for param_name, header_name in ALLOWED_HEADER_OVERRIDES.items():
if param_name in query_params:
query_string[param_name] = query_params[param_name][0]
    # The request's headers take precedence over the query parameters: per AWS
    # behaviour, differing values for the same field in the headers and in the
    # query parameters of the request URL make the signature calculation fail.
    # Add valid headers into sign_headers, skipping the overridden headers
    # and the headers which have already been sent in the query string params
presign_params_lower = (
[p.lower() for p in SIGNATURE_V4_PARAMS]
if is_v4
else [p.lower() for p in SIGNATURE_V2_PARAMS]
)
params_header_override = [
param_name for param_name, header_name in ALLOWED_HEADER_OVERRIDES.items()
]
if len(query_params) > 2:
for key in query_params:
key_lower = key.lower()
if key_lower not in presign_params_lower:
if (
key_lower not in (header[0].lower() for header in headers)
and key_lower not in params_header_override
):
if key_lower in (
allowed_param.lower() for allowed_param in ALLOWED_QUERY_PARAMS
):
query_string[key] = query_params[key][0]
elif key_lower in (
blacklisted_header.lower() for blacklisted_header in BLACKLISTED_HEADERS
):
pass
else:
query_string[key] = query_params[key][0]
for header_name, header_value in headers.items():
header_name_lower = header_name.lower()
if header_name_lower.startswith("x-amz-") or header_name_lower.startswith("content-"):
if is_v2 and header_name_lower in query_params:
sign_headers[header_name] = header_value
if is_v4 and header_name_lower in query_params["X-Amz-SignedHeaders"][0]:
sign_headers[header_name] = header_value
    # Preparing the request dictionary used to build botocore's AWSRequest object
request_url = "{}://{}{}".format(parsed.scheme, parsed.netloc, parsed.path)
# Fix https://github.com/localstack/localstack/issues/3912
# urlencode method replaces white spaces with plus sign cause signature calculation to fail
query_string_encoded = (
urlencode(query_string, quote_via=urlparse.quote, safe=" ") if query_string else None
)
request_url = "%s?%s" % (request_url, query_string_encoded) if query_string else request_url
if forwarded_for:
request_url = re.sub("://[^/]+", "://%s" % forwarded_for, request_url)
bucket_name = extract_bucket_name(headers, parsed.path)
request_dict = {
"url_path": parsed.path,
"query_string": query_string,
"method": method,
"headers": sign_headers,
"body": b"",
"url": request_url,
"context": {
"is_presign_request": True,
"use_global_endpoint": True,
"signing": {"bucket": bucket_name},
},
}
    # Support the virtual-host addressing style in signature version 2.
    # This is not needed in v4, which is already converted to the virtual addressing style:
    # v2 requires a path-based request_dict while v4 requires a virtual-host-styled one
if uses_host_addressing(headers) and is_v2:
request_dict["url_path"] = "/{}{}".format(bucket_name, request_dict["url_path"])
parsed_url = urlparse.urlparse(request_url)
request_dict["url"] = "{}://{}:{}{}".format(
parsed_url.scheme,
S3_VIRTUAL_HOSTNAME,
config.EDGE_PORT,
request_dict["url_path"],
)
request_dict["url"] = (
"%s?%s" % (request_dict["url"], query_string_encoded)
if query_string
else request_dict["url"]
)
if not is_v2 and any(p in query_params for p in SIGNATURE_V2_PARAMS):
response = requests_error_response_xml_signature_calculation(
code=403,
message="Query-string authentication requires the Signature, Expires and AWSAccessKeyId parameters",
code_string="AccessDenied",
)
elif is_v2 and not is_v4:
response = authenticate_presign_url_signv2(
method, path, headers, data, url, query_params, request_dict
)
if not is_v4 and any(p in query_params for p in SIGNATURE_V4_PARAMS):
response = requests_error_response_xml_signature_calculation(
code=403,
message="Query-string authentication requires the X-Amz-Algorithm, \
X-Amz-Credential, X-Amz-Date, X-Amz-Expires, \
X-Amz-SignedHeaders and X-Amz-Signature parameters.",
code_string="AccessDenied",
)
elif is_v4 and not is_v2:
response = authenticate_presign_url_signv4(
method, path, headers, data, url, query_params, request_dict
)
if response is not None:
LOGGER.info("Presign signature calculation failed: %s", response)
return response
LOGGER.debug("Valid presign url.")
def authenticate_presign_url_signv2(method, path, headers, data, url, query_params, request_dict):
# Calculating Signature
aws_request = create_request_object(request_dict)
credentials = Credentials(
access_key=TEST_AWS_ACCESS_KEY_ID,
secret_key=TEST_AWS_SECRET_ACCESS_KEY,
token=query_params.get("X-Amz-Security-Token", None),
)
auth = HmacV1QueryAuth(credentials=credentials, expires=query_params["Expires"][0])
split = urlsplit(aws_request.url)
string_to_sign = auth.get_string_to_sign(
method=method, split=split, headers=aws_request.headers
)
signature = auth.get_signature(string_to_sign=string_to_sign)
# Comparing the signature in url with signature we calculated
query_sig = urlparse.unquote(query_params["Signature"][0])
if config.S3_SKIP_SIGNATURE_VALIDATION:
if query_sig != signature:
LOGGER.warning(
"Signatures do not match, but not raising an error, as S3_SKIP_SIGNATURE_VALIDATION=1"
)
signature = query_sig
if query_sig != signature:
return requests_error_response_xml_signature_calculation(
code=403,
code_string="SignatureDoesNotMatch",
aws_access_token=TEST_AWS_ACCESS_KEY_ID,
string_to_sign=string_to_sign,
signature=signature,
message="The request signature we calculated does not match the signature you provided. \
Check your key and signing method.",
)
# Checking whether the url is expired or not
if int(query_params["Expires"][0]) < time.time():
if config.S3_SKIP_SIGNATURE_VALIDATION:
LOGGER.warning(
"Signature is expired, but not raising an error, as S3_SKIP_SIGNATURE_VALIDATION=1"
)
else:
return requests_error_response_xml_signature_calculation(
code=403,
code_string="AccessDenied",
message="Request has expired",
expires=query_params["Expires"][0],
)
def authenticate_presign_url_signv4(method, path, headers, data, url, query_params, request_dict):
is_presign_valid = False
for port in PORT_REPLACEMENT:
match = re.match(HOST_COMBINATION_REGEX, urlparse.urlparse(request_dict["url"]).netloc)
if match and match.group(2):
request_dict["url"] = request_dict["url"].replace("%s" % match.group(2), "%s" % port)
else:
request_dict["url"] = "%s:%s" % (request_dict["url"], port)
# Calculating Signature
aws_request = create_request_object(request_dict)
ReadOnlyCredentials = namedtuple(
"ReadOnlyCredentials", ["access_key", "secret_key", "token"]
)
credentials = ReadOnlyCredentials(
TEST_AWS_ACCESS_KEY_ID,
TEST_AWS_SECRET_ACCESS_KEY,
query_params.get("X-Amz-Security-Token", None),
)
region = query_params["X-Amz-Credential"][0].split("/")[2]
signer = S3SigV4QueryAuth(
credentials, "s3", region, expires=int(query_params["X-Amz-Expires"][0])
)
signature = signer.add_auth(aws_request, query_params["X-Amz-Date"][0])
expiration_time = datetime.datetime.strptime(
query_params["X-Amz-Date"][0], "%Y%m%dT%H%M%SZ"
) + datetime.timedelta(seconds=int(query_params["X-Amz-Expires"][0]))
expiration_time = expiration_time.replace(tzinfo=datetime.timezone.utc)
# Comparing the signature in url with signature we calculated
query_sig = urlparse.unquote(query_params["X-Amz-Signature"][0])
if query_sig == signature:
is_presign_valid = True
break
# Comparing the signature in url with signature we calculated
if config.S3_SKIP_SIGNATURE_VALIDATION:
if not is_presign_valid:
LOGGER.warning(
"Signatures do not match, but not raising an error, as S3_SKIP_SIGNATURE_VALIDATION=1"
)
signature = query_sig
is_presign_valid = True
if not is_presign_valid:
return requests_error_response_xml_signature_calculation(
code=403,
code_string="SignatureDoesNotMatch",
aws_access_token=TEST_AWS_ACCESS_KEY_ID,
signature=signature,
message="The request signature we calculated does not match the signature you provided. \
Check your key and signing method.",
)
# Checking whether the url is expired or not
if is_expired(expiration_time):
if config.S3_SKIP_SIGNATURE_VALIDATION:
LOGGER.warning(
"Signature is expired, but not raising an error, as S3_SKIP_SIGNATURE_VALIDATION=1"
)
else:
return requests_error_response_xml_signature_calculation(
code=403,
code_string="AccessDenied",
message="Request has expired",
expires=query_params["X-Amz-Expires"][0],
)
| 37.956332
| 119
| 0.657041
|
c24d32c700fd4035542a928073200c021eb085fc
| 1,860
|
py
|
Python
|
wb/main/models/tokenizer/validate_tokenizer_jobs_model.py
|
apaniukov/workbench
|
2f2653ecfd0143d2d53e33ad84379f13443fdfaa
|
[
"Apache-2.0"
] | 23
|
2022-03-17T12:24:09.000Z
|
2022-03-31T09:13:30.000Z
|
wb/main/models/tokenizer/validate_tokenizer_jobs_model.py
|
apaniukov/workbench
|
2f2653ecfd0143d2d53e33ad84379f13443fdfaa
|
[
"Apache-2.0"
] | 18
|
2022-03-21T08:17:44.000Z
|
2022-03-30T12:42:30.000Z
|
wb/main/models/tokenizer/validate_tokenizer_jobs_model.py
|
apaniukov/workbench
|
2f2653ecfd0143d2d53e33ad84379f13443fdfaa
|
[
"Apache-2.0"
] | 16
|
2022-03-17T12:24:14.000Z
|
2022-03-31T12:15:12.000Z
|
"""
OpenVINO DL Workbench
Validate tokenizer upload job model
Copyright (c) 2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from sqlalchemy import Integer, Column, ForeignKey
from sqlalchemy.orm import relationship, backref
from wb.main.enumerates import JobTypesEnum
from wb.main.models.jobs_model import JobsModel
from wb.main.models.tokenizer import TokenizerModel
from wb.main.models.tokenizer.wait_tokenizer_upload_jobs_model import TokenizerUploadJobData
class ValidateTokenizerJobModel(JobsModel):
__tablename__ = 'validate_tokenizer_jobs'
__mapper_args__ = {
'polymorphic_identity': JobTypesEnum.validate_tokenizer_type.value
}
job_id = Column(Integer, ForeignKey(JobsModel.job_id), primary_key=True)
tokenizer_id = Column(Integer, ForeignKey(TokenizerModel.id), nullable=False)
tokenizer: TokenizerModel = relationship(
TokenizerModel,
foreign_keys=[tokenizer_id],
uselist=False,
backref=backref('validate_tokenizer_jobs', cascade='delete,all')
)
def __init__(self, data: TokenizerUploadJobData):
super().__init__(data)
self.tokenizer_id = data['tokenizer_id']
self.topology_id = data['model_id']
def json(self) -> dict:
return {
**super().json(),
'tokenizer': self.tokenizer.json(),
}
| 34.444444
| 92
| 0.73871
|
fd6cd5694a3eccc73a1b9b892437e62dee36fc03
| 634
|
py
|
Python
|
4949/solution.py
|
bossm0n5t3r/BOJ
|
03132388a0c76ef66d6b0dec2053aeca65c4aee6
|
[
"MIT"
] | 2
|
2020-01-14T07:27:25.000Z
|
2020-02-12T07:49:58.000Z
|
4949/solution.py
|
bossm0n5t3r/BOJ
|
03132388a0c76ef66d6b0dec2053aeca65c4aee6
|
[
"MIT"
] | 1
|
2020-01-14T07:29:30.000Z
|
2021-11-28T11:29:08.000Z
|
4949/solution.py
|
bossm0n5t3r/BOJ
|
03132388a0c76ef66d6b0dec2053aeca65c4aee6
|
[
"MIT"
] | null | null | null |
import sys
input = sys.stdin.readline
print = sys.stdout.writelines
def sol():
while True:
input_str = input().rstrip()
if input_str == ".":
break
print("yes\n" if is_valid(input_str) else "no\n")
def is_valid(S):
stack = []
for c in S:
if c in "([":
stack.append(c)
elif c in ")]":
if not stack:
return False
if c == ")" and stack.pop() != "(":
return False
if c == "]" and stack.pop() != "[":
return False
return not stack
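# Illustrative checks (comment added for clarity):
#   is_valid("(a[b])") -> True, is_valid("([)]") -> False,
#   is_valid("(") -> False (an unmatched opener leaves the stack non-empty)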
if __name__ == "__main__":
sol()
| 19.212121
| 57
| 0.463722
|
c24a0157c2eedcb21eeab2ed47ea7269675dcd83
| 25,136
|
py
|
Python
|
Sindri/eos.py
|
mrcsbrn/TCC_software
|
17a5335aed17d4740c3bbd0ef828b0fc5dcea1da
|
[
"MIT"
] | 11
|
2019-10-17T02:01:51.000Z
|
2022-03-17T17:39:34.000Z
|
Sindri/eos.py
|
mrcsbrn/TCC_software
|
17a5335aed17d4740c3bbd0ef828b0fc5dcea1da
|
[
"MIT"
] | 2
|
2019-07-25T22:16:16.000Z
|
2020-03-28T01:59:59.000Z
|
Sindri/eos.py
|
mrcsbrn/TCC_software
|
17a5335aed17d4740c3bbd0ef828b0fc5dcea1da
|
[
"MIT"
] | 5
|
2019-07-15T18:19:36.000Z
|
2021-12-24T08:06:24.000Z
|
import numpy as np
import sympy as sp
from scipy.integrate import quad
from CubicEOS import CubicEOS
from Properties import Props
from compounds import MixtureProp
from constants import R_IG
from polyEqSolver import solve_cubic
eos_options = {
"van der Waals (1890)": "van_der_waals_1890",
"Redlich and Kwong (1949)": "redlich_and_kwong_1949",
"Wilson (1964)": "wilson_1964",
"Soave (1972)": "soave_1972",
"Peng and Robinson (1976)": "peng_and_robinson_1976",
# "Schmidt and Wenzel (1979)": "schmidt_and_wenzel_1979", # conferir regra de mistura para esse
"Péneloux, et al. (1982)": "peneloux_et_al_1982",
"Patel and Teja (1982)": "patel_and_teja_1982",
"Adachi, et al. (1983)": "adachi_et_al_1983",
"Soave (1984)": "soave_1984",
"Adachi, et al. (1985)": "adachi_et_al_1985",
"Stryjek and Vera (1986)": "stryjek_and_vera_1986",
"Twu, et al. (1995)": "twu_et_al_1995",
"Ahlers-Gmehling (2001)": "ahlers_gmehling_2001",
"Gasem, et al. PR modification (2001)": "gasem_et_al_pr_2001",
"Gasem, et al. Twu modificaton (2001)": "gasem_et_al_twu_2001",
"Gasem, et al.(2001)": "gasem_et_al_2001",
}
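# Maps the human-readable EOS names (as displayed in the UI) to the internal
# identifiers dispatched on in EOS._initialize, e.g.
#   EOS(mix, k, "Peng and Robinson (1976)")  selects "peng_and_robinson_1976".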
class EOS(CubicEOS):
def __init__(self, mix: MixtureProp, k, eos):
super().__init__()
self.mix = mix
self.y = np.atleast_1d(self.mix.y)
self.k = k
self.n = self.mix.n
self.symb_N = sp.symbols("N", real=True, positive=True)
self.symb_Ns = sp.symbols("N:{}".format(self.n), real=True, positive=True)
self.symb_y = sp.symbols("y", real=True, positive=True)
self.symb_ys = sp.symbols("y:{}".format(self.n), real=True, positive=True)
self.symb_thetam = 0
self.symb_bm = 0
self.symb_epsilonm = 0
self.symb_deltam = 0
self.eosDisplayName = eos
self.eosValue = eos_options[self.eosDisplayName]
self.Zcs = np.zeros(self.n)
self.Vcs = np.zeros(self.n)
self.Pcs = np.zeros(self.n)
self.Tcs = np.zeros(self.n)
self.omegas = np.zeros(self.n)
for i in range(self.n):
self.Zcs[i] = self.mix.substances[i].Zc
self.Vcs[i] = self.mix.substances[i].Vc
self.Tcs[i] = self.mix.substances[i].Tc
self.Pcs[i] = self.mix.substances[i].Pc
self.omegas[i] = self.mix.substances[i].omega
self._initialize()
self._computeParameters()
def _initialize(self):
if self.eosValue == "van_der_waals_1890":
thetas = []
self.b = 0
for i in range(self.n):
self.b += (0.125 / (self.Pcs[i] / (R_IG * self.Tcs[i]))) * self.y[i]
thetas.append(0.42188 * (R_IG * self.Tcs[i]) ** 2 / self.Pcs[i])
self._calculate_theta_mixture(thetas)
self.delta = 0.0
self.epsilon = 0.0
elif self.eosValue == "redlich_and_kwong_1949":
thetas = []
self.b = 0
for i in range(self.n):
self.b += (0.08664 / (self.Pcs[i] / (R_IG * self.Tcs[i]))) * self.y[i]
thetas.append(
(0.42748 * (R_IG * self.Tcs[i]) ** 2 / self.Pcs[i])
/ (self.T / self.Tcs[i]) ** 0.5
)
self._calculate_theta_mixture(thetas)
self.delta = self.b
self.epsilon = 0.0
elif self.eosValue == "wilson_1964":
thetas = []
self.b = 0
for i in range(self.n):
self.b += (0.08664 / (self.Pcs[i] / (R_IG * self.Tcs[i]))) * self.y[i]
a = 0.42748 * (R_IG * self.Tcs[i]) ** 2 / self.Pcs[i]
alpha = (self.T / self.Tcs[i]) * (
1
+ (1.57 + 1.62 * self.omegas[i])
* (1.0 / (self.T / self.Tcs[i]) - 1.0)
)
thetas.append(a * alpha)
self._calculate_theta_mixture(thetas)
self.delta = self.b
self.epsilon = 0.0
elif self.eosValue == "soave_1972":
thetas = []
self.b = 0
for i in range(self.n):
self.b += (0.08664 / (self.Pcs[i] / (R_IG * self.Tcs[i]))) * self.y[i]
a = 0.42748 * (R_IG * self.Tcs[i]) ** 2 / self.Pcs[i]
alpha = (
1.0
+ (0.48 + 1.574 * self.omegas[i] - 0.176 * self.omegas[i] ** 2)
* (1.0 - (self.T / self.Tcs[i]) ** 0.5)
) ** 2
thetas.append(a * alpha)
self._calculate_theta_mixture(thetas)
self.delta = self.b
self.epsilon = 0.0
elif self.eosValue == "peng_and_robinson_1976":
thetas = []
self.b = 0
for i in range(self.n):
self.b += self.y[i] * (0.07780 / (self.Pcs[i] / (R_IG * self.Tcs[i])))
self.symb_bm += (self.symb_ys[i]) * (
0.07780 / (self.Pcs[i] / (R_IG * self.Tcs[i]))
)
_tmpthetas = (
(
1.0
+ (
0.37464
+ 1.54226 * self.omegas[i]
- 0.2699 * self.omegas[i] ** 2
)
* (1.0 - (self.T / self.Tcs[i]) ** 0.5)
)
** 2
) * (0.45724 * (R_IG * self.Tcs[i]) ** 2 / self.Pcs[i])
thetas.append(_tmpthetas)
self._calculate_theta_mixture(thetas)
self._symb_calculate_theta_mixture(thetas)
self.delta = 2 * self.b
self.symb_deltam = 2 * self.symb_bm
self.epsilon = -self.b * self.b
self.symb_epsilonm = -self.symb_bm ** 2
elif self.eosValue == "peneloux_et_al_1982":
thetas = []
self.b = 0
c = 0
for i in range(self.n):
a = 0.42748 * (R_IG * self.Tcs[i]) ** 2 / self.Pcs[i]
c += (
0.40768
* (R_IG * self.Tcs[i] / self.Pcs[i])
* (0.00385 + 0.08775 * self.omegas[i])
) * self.y[i]
self.b += (0.08664 / (self.Pcs[i] / (R_IG * self.Tcs[i]))) * self.y[i]
alpha = (
1.0
+ (0.48 + 1.574 * self.omegas[i] - 0.176 * self.omegas[i] ** 2)
* (1.0 - (self.T / self.Tcs[i]) ** 0.5)
) ** 2
thetas.append(a * alpha)
self.b = self.b - c
self._calculate_theta_mixture(thetas)
self.delta = self.b + 2 * c
self.epsilon = c * (self.b + c)
elif self.eosValue == "patel_and_teja_1982":
thetas = []
self.b = 0
c = 0
for i in range(self.n):
F = 0.45241 + 1.30982 * self.omegas[i] - 0.295937 * self.omegas[i] ** 2
zeta_c = (
0.32903
- 0.076799 * self.omegas[i]
+ 0.0211947 * self.omegas[i] ** 2
)
r = np.atleast_1d(
solve_cubic(1, 2 - 3 * zeta_c, 3 * zeta_c ** 2, -zeta_c ** 3)
)
omega_b = np.min(r[r >= 0])
omega_c = 1 - 3 * zeta_c
omega_a = (
3 * zeta_c ** 2
+ 3 * (1 - 2 * zeta_c) * omega_b
+ omega_b ** 2
+ 1
- 3 * zeta_c
)
c += self.y[i] * omega_c * R_IG * self.Tcs[i] / self.Pcs[i]
self.b += self.y[i] * omega_b * R_IG * self.Tcs[i] / self.Pcs[i]
alpha = (1 + F * (1 - (self.T / self.Tcs[i]) ** 0.5)) ** 2
a = omega_a * (R_IG * self.Tcs[i]) ** 2 / self.Pcs[i]
thetas.append(a * alpha)
self._calculate_theta_mixture(thetas)
self.delta = self.b + c
self.epsilon = -self.b * c
elif self.eosValue == "adachi_et_al_1983":
thetas = []
self.b = 0
c = 0
b1, b2, b3 = 0, 0, 0
for i in range(self.n):
b1 += self.y[i] * (
R_IG
* self.Tcs[i]
* (
0.08974
- 0.03452 * self.omegas[i]
+ 0.00330 * self.omegas[i] ** 2
)
/ self.Pcs[i]
)
b2 += self.y[i] * (
R_IG
* self.Tcs[i]
* (
0.03686
+ 0.00405 * self.omegas[i]
- 0.01073 * self.omegas[i] ** 2
+ 0.00157 * self.omegas[i] ** 3
)
/ self.Pcs[i]
)
b3 += self.y[i] * (
R_IG
* self.Tcs[i]
* (
0.154
+ 0.14122 * self.omegas[i]
- 0.00272 * self.omegas[i] ** 2
- 0.00484 * self.omegas[i] ** 3
)
/ self.Pcs[i]
)
a = (
(R_IG * self.Tcs[i]) ** 2
* (
0.44869
+ 0.04024 * self.omegas[i]
+ 0.01111 * self.omegas[i] ** 2
- 0.00576 * self.omegas[i] ** 3
)
/ self.Pcs[i]
)
alpha = (
1
+ (0.407 + 1.3787 * self.omegas[i] - 0.2933 * self.omegas[i] ** 2)
* (1.0 - (self.T / self.Tcs[i]) ** 0.5)
) ** 2
thetas.append(a * alpha)
self.b = b1
self._calculate_theta_mixture(thetas)
self.delta = b3 - b2
self.epsilon = -b2 * b3
elif self.eosValue == "soave_1984":
thetas = []
self.b = 0
self.epsilon = 0
for i in range(self.n):
self.b += (0.08333 / (self.Pcs[i] / (R_IG * self.Tcs[i]))) * self.y[i]
self.epsilon += (
0.001736 / (self.Pcs[i] / (R_IG * self.Tcs[i])) ** 2 * self.y[i]
)
a = 0.42188 * (R_IG * self.Tcs[i]) ** 2 / self.Pcs[i]
alpha = (
1.0
+ (
0.4998
+ 1.5928 * self.omegas[i]
- 0.19563 * self.omegas[i] ** 2
+ 0.025 * self.omegas[i] ** 3
)
* (1.0 - (self.T / self.Tcs[i]) ** 0.5)
) ** 2
thetas.append(a * alpha)
self._calculate_theta_mixture(thetas)
self.delta = self.b
elif self.eosValue == "adachi_et_al_1985":
thetas = []
self.b = 0
c = 0
for i in range(self.n):
a = (
(R_IG * self.Tcs[i]) ** 2
* (
0.43711
+ 0.02366 * self.omegas[i]
+ 0.10538 * self.omegas[i] ** 2
+ 0.10164 * self.omegas[i] ** 3
)
/ self.Pcs[i]
)
self.b += self.y[i] * (
(R_IG * self.Tcs[i])
* (
0.08779
- 0.02181 * self.omegas[i]
- 0.06708 * self.omegas[i] ** 2
+ 0.10617 * self.omegas[i] ** 3
)
/ self.Pcs[i]
)
c += self.y[i] * (
(R_IG * self.Tcs[i])
* (
0.0506
+ 0.04184 * self.omegas[i]
+ 0.16413 * self.omegas[i] ** 2
- 0.03975 * self.omegas[i] ** 3
)
/ self.Pcs[i]
)
alpha = (
1
+ (
0.4406
+ 1.7039 * self.omegas[i]
- 1.729 * self.omegas[i] ** 2
+ 0.9929 * self.omegas[i] ** 3
)
* (1 - (self.T / self.Tcs[i]) ** 0.5)
) ** 2
thetas.append(a * alpha)
self._calculate_theta_mixture(thetas)
self.delta = 2 * c
self.epsilon = -c ** 2
elif self.eosValue == "stryjek_and_vera_1986":
thetas = []
self.b = 0
for i in range(self.n):
self.b += self.y[i] * (0.07780 / (self.Pcs[i] / (R_IG * self.Tcs[i])))
self.symb_bm += (self.symb_ys[i]) * (
0.07780 / (self.Pcs[i] / (R_IG * self.Tcs[i]))
)
k0 = (
0.378893
+ 1.48971530 * self.omegas[i]
- 0.17131848 * self.omegas[i] ** 2
+ 0.0196554 * self.omegas[i] ** 3
)
k1 = 0
name = self.mix.substances[i].Name
if name == "hexadecane":
k1 = 0.02665
elif name == "hexane":
k1 = 0.05104
elif name == "cyclohexane":
k1 = 0.07023
elif name == "methane":
k1 = -0.00159
elif name == "benzene":
k1 = 0.07019
Tr = self.T / self.Tcs[i]
k = k0 + k1 * (1 + Tr) * (0.7 - Tr)
_tmpthetas = (
(1.0 + (k) * (1.0 - (self.T / self.Tcs[i]) ** 0.5)) ** 2
) * (0.45724 * (R_IG * self.Tcs[i]) ** 2 / self.Pcs[i])
thetas.append(_tmpthetas)
self._calculate_theta_mixture(thetas)
self._symb_calculate_theta_mixture(thetas)
self.delta = 2 * self.b
self.symb_deltam = 2 * self.symb_bm
self.epsilon = -self.b * self.b
self.symb_epsilonm = -self.symb_bm ** 2
elif self.eosValue == "twu_et_al_1995":
thetas = []
self.b = 0
for i in range(self.n):
self.b += (
self.y[i] * (R_IG * self.Tcs[i]) * 0.0777960739039 / self.Pcs[i]
)
a = (R_IG * self.Tcs[i]) ** 2 * 0.457235528921 / self.Pcs[i]
alpha0 = (self.T / self.Tcs[i]) ** (
-0.171813
) * 2.718281828459045235360 ** (
0.125283 * (1 - (self.T / self.Tcs[i]) ** 1.77634)
)
alpha1 = (self.T / self.Tcs[i]) ** (
-0.607352
) * 2.718281828459045235360 ** (
0.511614 * (1 - (self.T / self.Tcs[i]) ** 2.20517)
)
alpha = alpha0 + self.omegas[i] * (alpha1 - alpha0)
thetas.append(a * alpha)
self._calculate_theta_mixture(thetas)
self.delta = self.b * 2
self.epsilon = -self.b ** 2
elif self.eosValue == "ahlers_gmehling_2001":
thetas = []
self.b = 0
c = 0
for i in range(self.n):
a = 0.45724 * (R_IG * self.Tcs[i]) ** 2 / self.Pcs[i]
alpha = (
1.0
+ (
0.37464
+ 1.54226 * self.omegas[i]
- 0.2699 * self.omegas[i] ** 2
)
* (1.0 - (self.T / self.Tcs[i]) ** 0.5)
) ** 2
gamma = 246.78 * self.Zcs[i] ** 2 - 107.21 * self.Zcs[i] + 12.67
n = -74.458 * self.Zcs[i] + 26.966
beta = 0.35 / (
0.35 + (n * np.abs((self.T / self.Tcs[i]) - alpha)) ** gamma
)
cc = (
(0.3074 - self.Zcs[i]) * R_IG * (self.T / self.Tcs[i]) / self.Pcs[i]
)
c += (cc * beta) * self.y[i]
self.b += self.y[i] * 0.07780 / (self.Pcs[i] / (R_IG * self.Tcs[i]))
thetas.append(a * alpha)
self._calculate_theta_mixture(thetas)
self.b = self.b - c
self.delta = self.b * 2
self.epsilon = -self.b * self.b + 4 * self.b * c - 2 * c ** 2
elif self.eosValue == "gasem_et_al_pr_2001":
thetas = []
self.b = 0
for i in range(self.n):
a = 0.45724 * (R_IG * self.Tcs[i]) ** 2 / self.Pcs[i]
self.b += self.y[i] * 0.07780 / (self.Pcs[i] / (R_IG * self.Tcs[i]))
alpha = (
1.0
+ (
0.386590
+ 1.50226 * self.omegas[i]
- 0.1687 * self.omegas[i] ** 2
)
* (1.0 - (self.T / self.Tcs[i]) ** 0.5)
) ** 2
thetas.append(a * alpha)
self._calculate_theta_mixture(thetas)
self.delta = 2 * self.b
self.epsilon = -self.b ** 2
elif self.eosValue == "gasem_et_al_twu_2001":
thetas = []
self.b = 0
for i in range(self.n):
a = 0.45724 * (R_IG * self.Tcs[i]) ** 2 / self.Pcs[i]
self.b += self.y[i] * 0.07780 / (self.Pcs[i] / (R_IG * self.Tcs[i]))
alpha0 = (self.T / self.Tcs[i]) ** (
-0.207176
) * 2.718281828459045235360 ** (
0.092099 * (1 - (self.T / self.Tcs[i]) ** 1.94800)
)
alpha1 = (self.T / self.Tcs[i]) ** (
-0.502297
) * 2.718281828459045235360 ** (
0.603486 * (1 - (self.T / self.Tcs[i]) ** 2.09626)
)
alpha = alpha0 + self.omegas[i] * (alpha1 - alpha0)
thetas.append(a * alpha)
self._calculate_theta_mixture(thetas)
self.delta = 2 * self.b
self.epsilon = -self.b ** 2
elif self.eosValue == "gasem_et_al_2001":
thetas = []
self.b = 0
for i in range(self.n):
a = 0.45724 * (R_IG * self.Tcs[i]) ** 2 / self.Pcs[i]
self.b += self.y[i] * 0.07780 / (self.Pcs[i] / (R_IG * self.Tcs[i]))
A = 2.0
B = 0.836
C = 0.134
D = 0.508
E = -0.0467
Tr = self.T / self.Tcs[i]
w = self.omegas[i]
alpha = 2.718281828459045235360 ** (
(A + B * Tr) * (1.0 - Tr ** (C + w * (D + E * w)))
)
thetas.append(a * alpha)
self._calculate_theta_mixture(thetas)
self.delta = 2 * self.b
self.epsilon = -self.b ** 2
else:
raise ValueError("Equation of state doesn't exists in the current database")
# ========= END OF self._initialize() ===============
def _calculate_theta_mixture(self, thetas):
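        # Quadratic (van der Waals) mixing rule with binary interaction
        # parameters k[i][j]:
        #   theta_m = sum_i sum_j y_i * y_j * sqrt(theta_i * theta_j) * (1 - k_ij)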
self.theta = 0
for i in range(self.n):
inner_sum = 0
for j in range(self.n):
inner_sum += (
self.y[i]
* self.y[j]
* sp.sqrt(thetas[i] * thetas[j])
* (1 - self.k[i][j])
)
self.theta += inner_sum
def _symb_calculate_theta_mixture(self, thetas):
for i in range(self.n):
inner_sum = 0
for j in range(self.n):
inner_sum += (
(self.symb_ys[i])
* (self.symb_ys[j])
* sp.sqrt(thetas[i] * thetas[j])
* (1 - self.k[i][j])
)
self.symb_thetam += inner_sum
def getAllProps(
self, Tref: float, T: float, Pref: float, P: float
) -> (Props, Props):
log = ""
zs = self.getZfromPT(P, T)
zliq, zvap = np.min(zs), np.max(zs)
vliq, vvap = zliq * R_IG * T / P, zvap * R_IG * T / P
avgMolWt = self.mix.getMolWt()
if avgMolWt:
rholiq, rhovap = avgMolWt * 1e-3 / vliq, avgMolWt * 1e-3 / vvap
else:
rholiq, rhovap = 0, 0
if self.mix.hasCp():
igprops = self.mix.getIGProps(Tref, T, Pref, P)
log += self.mix.getCpLog(Tref, T)
pliq, pvap = self.getCpHSGUA(Tref, T, Pref, P)
else:
igprops = 0
pliq, pvap = 0, 0
            log += "Couldn't calculate properties: missing Cp parameters"
fl, fv = self.getFugacity(P, T, vliq, zliq), self.getFugacity(P, T, vvap, zvap)
retPropsliq, retPropsvap = Props(), Props()
retPropsliq.Z, retPropsvap.Z = zliq, zvap
retPropsliq.V, retPropsvap.V = vliq, vvap
retPropsliq.rho, retPropsvap.rho = rholiq, rhovap
retPropsliq.P, retPropsvap.P = P, P
retPropsliq.T, retPropsvap.T = T, T
retPropsliq.Fugacity, retPropsvap.Fugacity = fl, fv
retPropsliq.IGProps, retPropsvap.IGProps = igprops, igprops
retPropsliq.Props, retPropsvap.Props = pliq, pvap
retPropsliq.log, retPropsvap.log = log, log
return retPropsliq, retPropsvap
def getCpHSGUA(self, Tref: float, T: float, Pref: float, P: float):
zs = self.getZfromPT(P, T)
zsref = self.getZfromPT(Pref, Tref)
zliq, zvap = np.min(zs), np.max(zs)
zliqref, zvapref = np.min(zsref), np.max(zsref)
vliq, vvap = zliq * R_IG * T / P, zvap * R_IG * T / P
vliqref, vvapref = zliqref * R_IG * Tref / Pref, zvapref * R_IG * Tref / Pref
igprop = self.mix.getIGProps(
Tref, T, Pref, P
) # make sure that mixture can handle single substances
ddp_liq = self.getDeltaDepartureProps(
Pref, Tref, vliqref, zliqref, P, T, vliq, zliq
)
ddp_vap = self.getDeltaDepartureProps(
Pref, Tref, vvapref, zvapref, P, T, vvap, zvap
)
pliq = igprop.subtract(ddp_liq)
pvap = igprop.subtract(ddp_vap)
return pliq, pvap
# lazy code, it can be improved a lot
def getPhi_i(self, i: int, _P: float, _T: float, _V: float, _Z: float) -> float:
symb_Z = self.V / (self.V - self.symb_bm) - self.V * (
self.symb_thetam / (R_IG * self.T)
) / (
(self.V - self.symb_bm)
* (self.V ** 2 + self.symb_deltam * self.V + self.symb_epsilonm)
)
integrand = (sp.diff(symb_Z, self.symb_ys[i]) - 1) / self.V
for j in range(self.n):
integrand = integrand.subs(self.symb_ys[j], self.y[j])
integrand_num = sp.lambdify(
[self.V, self.P, self.T], integrand, modules="numpy"
)
res = quad(integrand_num, _V, np.inf, args=(_P, _T))[0] - np.log(_Z)
        return np.exp(res)
def _getPb_initial_guess(self, _T: float, _x) -> float:
_x = np.atleast_1d(_x)
pb = float(
np.sum(
_x
* self.Pcs
* np.exp(5.373 * (1 + self.omegas) * (1.0 - self.Tcs / _T))
)
)
return pb
    def getBubblePointPressure(self, _T: float, x) -> float:
        assert np.isclose(np.sum(x), 1.0), "mole fractions must sum to 1"
        x = np.atleast_1d(x)
        Pb = self._getPb_initial_guess(_T, x)
        tol = 1e-8
        err = 1000
        ite = 0
        kmax = 10000
        y = np.full(self.n, 1.0 / self.n)
phivap = np.empty(self.n, dtype=float)
philiq = np.empty(self.n, dtype=float)
while err > tol and ite < kmax:
ite += 1
vapmix = MixtureProp([s for s in self.mix.substances], y)
liqmix = MixtureProp([s for s in self.mix.substances], x)
vapeos = EOS(vapmix, self.k, self.eosDisplayName)
liqeos = EOS(liqmix, self.k, self.eosDisplayName)
zvap = np.max(vapeos.getZfromPT(Pb, _T))
zliq = np.min(liqeos.getZfromPT(Pb, _T))
vvap, vliq = R_IG * _T * zvap / Pb, R_IG * _T * zliq / Pb
for i in range(self.n):
phivap[i] = vapeos.getPhi_i(i, Pb, _T, vvap, zvap)
philiq[i] = liqeos.getPhi_i(i, Pb, _T, vliq, zliq)
k = philiq / phivap
y = x * k
yt = np.sum(y)
err = np.abs(yt - 1.0)
print(Pb)
print(y)
| 36.21902
| 99
| 0.416852
|
b4ebdcf097c1c77b913aaa12ba09ee487d40c9c0
| 1,234
|
py
|
Python
|
imps/tests/test_strings.py
|
bootandy/imps
|
21c84b788d44a296d8db4f655bedcef3fad12c36
|
[
"Apache-2.0"
] | 13
|
2017-07-11T18:43:32.000Z
|
2020-12-29T23:13:12.000Z
|
imps/tests/test_strings.py
|
bootandy/imps
|
21c84b788d44a296d8db4f655bedcef3fad12c36
|
[
"Apache-2.0"
] | 4
|
2018-06-15T09:33:11.000Z
|
2019-02-09T10:46:56.000Z
|
imps/tests/test_strings.py
|
bootandy/imps
|
21c84b788d44a296d8db4f655bedcef3fad12c36
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import absolute_import, division, print_function
from imps.strings import get_doc_string, TRIPLE_DOUBLE, TRIPLE_SINGLE
def test_doc_string_ignores_normal_line():
s = 'import A'
assert not get_doc_string(s)
def test_doc_string_ignores_doc_string_in_comment():
s = 'import A # triple comment \"\"\" '
assert not get_doc_string(s)
def test_doc_string_ignores_strings():
s = '''s = '\"\"\"' '''
assert not get_doc_string(s)
def test_doc_string_gets_data_after_a_string():
s = '''s = '\"\"\"' \"\"\" after a str \"\"\" '''
assert get_doc_string(s) == [(10, TRIPLE_DOUBLE), (29, TRIPLE_DOUBLE)]
def test_doc_string_simple():
s = '''\"\"\" a doc string \"\"\"'''
assert get_doc_string(s) == [(0, TRIPLE_DOUBLE), (20, TRIPLE_DOUBLE)]
def test_doc_string_with_hash():
s = '''\"\"\" a doc string with hash # \"\"\"'''
assert get_doc_string(s) == [(0, TRIPLE_DOUBLE), (32, TRIPLE_DOUBLE)]
def test_doc_string_not_on_newline():
s = '''import A \"\"\"'''
assert get_doc_string(s) == [(9, TRIPLE_DOUBLE)]
def test_doc_string_with_single_quotes():
s = """\'\'\'import A \'\'\'"""
assert get_doc_string(s) == [(0, TRIPLE_SINGLE), (15, TRIPLE_SINGLE)]
| 27.422222
| 74
| 0.648298
|
96258328ea70d922520f48c5455590338cd82c38
| 1,891
|
py
|
Python
|
python/models/models.py
|
thejoeejoee/UPA-MIT-VUT-2020-2021
|
578ac0ec057b23d71792bab9df34e0bc143aabd3
|
[
"MIT"
] | null | null | null |
python/models/models.py
|
thejoeejoee/UPA-MIT-VUT-2020-2021
|
578ac0ec057b23d71792bab9df34e0bc143aabd3
|
[
"MIT"
] | 5
|
2021-04-08T20:36:28.000Z
|
2021-09-22T19:39:33.000Z
|
python/models/models.py
|
thejoeejoee/UPA-MIT-VUT-2020-2021
|
578ac0ec057b23d71792bab9df34e0bc143aabd3
|
[
"MIT"
] | 1
|
2021-09-08T10:23:34.000Z
|
2021-09-08T10:23:34.000Z
|
from django.db import models
from django_extensions.db.fields import CreationDateTimeField
from bulk_update_or_create import BulkUpdateOrCreateQuerySet
class BaseModel(models.Model):
created = CreationDateTimeField()
objects = models.Manager()
bulk_objects = BulkUpdateOrCreateQuerySet.as_manager()
class Meta:
abstract = True
class Station(BaseModel):
wmo_id = models.IntegerField(primary_key=True)
location = models.CharField(max_length=128)
station_name = models.CharField(max_length=128)
station_height = models.FloatField()
latitude = models.FloatField()
longitude = models.FloatField()
class Meta:
db_table = 'station'
verbose_name = 'Station'
verbose_name_plural = 'Stations'
def __str__(self):
return f'{self.wmo_id}: {self.station_name} ({self.latitude}, {self.longitude}; {self.station_height})'
class BaseTimedStationDataModel(BaseModel):
station = models.ForeignKey(Station, on_delete=models.CASCADE)
timestamp = models.DateTimeField()
class Meta:
abstract = True
unique_together = (
('station', 'timestamp'),
)
ordering = ('timestamp', 'station')
class Temperature(BaseTimedStationDataModel):
temperature = models.FloatField()
class Meta(BaseTimedStationDataModel.Meta):
db_table = 'temperature'
verbose_name = 'Temperature'
verbose_name_plural = 'Temperatures'
def __str__(self):
return f'{self.station}: ({self.timestamp}, {self.temperature})'
class Rainfall(BaseTimedStationDataModel):
rainfall = models.FloatField()
class Meta(BaseTimedStationDataModel.Meta):
db_table = 'rainfall'
verbose_name = 'Rainfall'
verbose_name_plural = 'Rainfalls'
def __str__(self):
return f'{self.station}: ({self.timestamp}, {self.rainfall})'
| 28.223881
| 111
| 0.692226
|
70973f51b568a08fabc51bd4468332ab6037c1b7
| 2,636
|
py
|
Python
|
apps/applications/urls.py
|
SkyAdam1/zayed
|
340d7462fb1e37c60b2a03c6a05b15870f0cb8b8
|
[
"Apache-2.0"
] | 1
|
2021-04-12T16:40:01.000Z
|
2021-04-12T16:40:01.000Z
|
apps/applications/urls.py
|
SkyAdam1/zayed
|
340d7462fb1e37c60b2a03c6a05b15870f0cb8b8
|
[
"Apache-2.0"
] | null | null | null |
apps/applications/urls.py
|
SkyAdam1/zayed
|
340d7462fb1e37c60b2a03c6a05b15870f0cb8b8
|
[
"Apache-2.0"
] | null | null | null |
from django.urls import path
from . import views
urlpatterns = [
path("", views.index, name="index_url"),
path(
"application_edit/<slug:pk>",
views.ApplicationUpdateView.as_view(),
name="application_update_url",
),
path(
"applications/",
views.ApplicationsOutputView.as_view(),
name="applications_output_url",
),
path(
"application/<int:pk>/",
views.ApplicationsOutputView.as_view(),
name="applications_output_pk_url",
),
path(
"application_add_expert/<int:pk>/",
views.ApplicationAddExpert.as_view(),
name="applications_add_expert_url",
),
path(
"application_remove_expert/<int:app>/<int:user>/",
views.remove_expert,
name="remove_expert_url",
),
path(
"application_create/",
views.ApplicationsCreateView.as_view(),
name="applications_create_url",
),
path(
"applications_delete/<int:pk>/",
views.ApplicationDelete.as_view(),
name="application_delete_url",
),
path(
"application_detail/<int:id>",
views.ApplicationsDetailView.as_view(),
name="applications_detail_url",
),
path(
"applications_status/<int:id>",
views.switch_application_status,
name="switch_status",
),
path(
"applications_approve/<int:id>",
views.switch_application_approve,
name="switch_status_reporta",
),
path(
"reports/",
views.ApplicationsReportingView.as_view(),
name="applications_reporting_url",
),
path(
"create_report/",
views.ApplicationReportView.as_view(),
name="applications_add_report_url",
),
path(
"report_detail/<int:id>",
views.ReportsDetail.as_view(),
name="reports_detail_url",
),
path(
"report_update/<slug:pk>",
views.ReportUpdateView.as_view(),
name="report_update_url",
),
path(
"report_delete/<int:pk>/",
views.ReportDelete.as_view(),
name="report_delete_url",
),
path(
"report_approve/<int:id>/", views.switch_report_status, name="switch_status_rep"
),
path("report_send/<int:id>/", views.send_report, name="send_report"),
path("update_remarks/<int:pk>/", views.delete_remarks, name="update_remarks_url"),
path("delete_comment/<int:pk>/", views.delete_comment, name="delete_comment"),
path("delete_remark/<int:pk>/", views.delete_remark, name="delete_remark"),
path("export_xls/", views.export_xls, name="export_xls"),
]
| 28.967033
| 88
| 0.615706
|
df4435720b7d9f192a28bcc7d6394770614a726c
| 966
|
py
|
Python
|
examples/real-world-data/superconduct/final_explainer.py
|
veneres/gef
|
07912d01040ca0169977ddd49839050c81ec2349
|
[
"MIT"
] | 2
|
2022-02-16T08:02:15.000Z
|
2022-02-16T13:07:53.000Z
|
examples/real-world-data/superconduct/final_explainer.py
|
veneres/gef
|
07912d01040ca0169977ddd49839050c81ec2349
|
[
"MIT"
] | null | null | null |
examples/real-world-data/superconduct/final_explainer.py
|
veneres/gef
|
07912d01040ca0169977ddd49839050c81ec2349
|
[
"MIT"
] | null | null | null |
import json
import pickle
from gamexplainer import GamExplainer
import lightgbm as lgbm
def main():
with open("config.json") as f:
config_dict = json.load(f)
model_path = config_dict["model_path"]
explainer_out = config_dict["explainer_out"]
forest = lgbm.Booster(model_file=model_path)
explanation_params = {
"verbose": False,
"interaction_importance_method": "count_path",
"feat_importance_method": "gain",
"n_spline_terms": 7,
"sample_method": "equi_size",
"sample_n": 4500,
"n_spline_per_term": 50,
"inter_max_distance": 64,
"n_inter_terms": 0,
"n_sample_gam": int(1e5),
"portion_sample_test": 0.3,
"classification": False
}
explainer = GamExplainer(**explanation_params)
explainer.explain(forest)
with open(explainer_out, "wb") as f:
pickle.dump(explainer, f)
if __name__ == '__main__':
main()
| 25.421053
| 54
| 0.634576
|
28afe335eaab9c196e4343902c35f1096904511d
| 710
|
py
|
Python
|
chat/forms.py
|
BastaAditya/Quiver
|
a6d29ec67341bd9cdb8a193ce1efcfd699aa4a96
|
[
"MIT"
] | null | null | null |
chat/forms.py
|
BastaAditya/Quiver
|
a6d29ec67341bd9cdb8a193ce1efcfd699aa4a96
|
[
"MIT"
] | null | null | null |
chat/forms.py
|
BastaAditya/Quiver
|
a6d29ec67341bd9cdb8a193ce1efcfd699aa4a96
|
[
"MIT"
] | null | null | null |
from django import forms
from .models import ChatMessage, ChatInfo
from loginsignup.utils import getBeaverInstanceFromUser
class ChatMessageForm(forms.ModelForm):
class Meta:
model = ChatMessage
fields = ["message","reply"]
def createNewMessage(self, urlparam, user):
status = super().is_valid()
if not status:
return status
message = self.cleaned_data.get("message")
reply = self.cleaned_data.get("reply")
beaver = getBeaverInstanceFromUser(user)
uuidUrlparam = ChatInfo.convertStringToUUID(urlparam)
response = ChatMessage.createMessage(uuidUrlparam, beaver, message, reply)
return response.get("status")
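# Illustrative view-side usage (assumed names; the actual views are not shown
# in this file):
#   form = ChatMessageForm(request.POST)
#   sent = form.createNewMessage(urlparam=chat_id_str, user=request.user)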
| 33.809524
| 82
| 0.690141
|
a5cf07b9a375bd60143ba9a28f414f1d5f0873ec
| 4,270
|
py
|
Python
|
src/queue/python/queue.py
|
SamyuelDanyo/data-structures
|
057fbebd5f4be3af81727c09970f843df7c43007
|
[
"MIT"
] | null | null | null |
src/queue/python/queue.py
|
SamyuelDanyo/data-structures
|
057fbebd5f4be3af81727c09970f843df7c43007
|
[
"MIT"
] | null | null | null |
src/queue/python/queue.py
|
SamyuelDanyo/data-structures
|
057fbebd5f4be3af81727c09970f843df7c43007
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#################################################
""" queue.py
# # Queue(FIFO) Implementation
# Supports:
# - Push.
# - Pop.
# - Peek.
"""
#################################################
# ### Author: Samyuel Danyo
# ### Date: 02/06/2020
# ### Last Edit: 02/06/2020
##################################################
# Array Implementation
##################################################
class Queue:
"""Queue(FIFO) array implementation."""
def __init__(self, cap):
self.cap = cap
self.data = [None]*cap
# Popping at the head
self.head = -1
# Pushing at the tail
self.tail = -1
def __str__(self):
return str(self.data)
    def capacity(self):
        """Get the queue capacity."""
        return self.cap
def is_empty(self):
"""True if the queue is empty."""
return self.head == self.tail == -1
    def size(self):
        """Get the queue's current size."""
        if self.is_empty():
            return 0
        # >= (not >) so a single element (head == tail) reports size 1,
        # not capacity + 1.
        if self.tail >= self.head:
            return self.tail - self.head + 1
        return self.capacity() + self.tail - self.head + 1
def is_full(self):
"""True if the queue is full."""
return self.size() == self.capacity()
def push(self, value):
"""Push a value into the queue."""
if self.is_full():
print("ERROR:: Queue Overflow!")
return None
self.tail = (self.tail + 1) % self.capacity()
if self.head == -1:
self.head += 1
self.data[self.tail] = value
return True
def pop(self):
"""Pop a value off the queue."""
if self.is_empty():
print("ERROR:: Queue Underflow!")
return None
value = self.data[self.head]
if self.head == self.tail:
self.head = -1
self.tail = -1
else:
self.head = (self.head + 1) % self.capacity()
return value
    def peek(self):
        """See the front (oldest) value in the queue."""
if self.is_empty():
print("WARNING:: Queue Empty!")
return None
return self.data[self.head]
##################################################
# List Implementation
##################################################
class QueueNode:
"""Node for a queue implementation."""
def __init__(self, data):
self.data = data
self.next = None
class ListQueue:
"""Queue(FIFO) list implementation."""
def __init__(self, cap):
self.cap = cap
self.num_els = 0
# Popping at the head
self.head = None
# Pushing at the tail
self.tail = None
def __str__(self):
data = []
node = self.head
while node:
data.append(str(node.data))
node = node.next
return "[" + ", ".join(data) + "]"
    def capacity(self):
        """Get the queue capacity."""
        return self.cap
    def size(self):
        """Get the queue's current size."""
        return self.num_els
def is_empty(self):
"""True if the queue is empty."""
return self.size() == 0
def is_full(self):
"""True if the queue is full."""
return self.size() == self.capacity()
def push(self, value):
"""Push a value into the queue."""
if self.is_full():
print("ERROR:: Queue Overflow!")
return None
data = QueueNode(value)
if self.num_els > 0:
self.tail.next = data
self.tail = self.tail.next
else:
self.tail = data
self.head = self.tail
self.num_els += 1
return True
def pop(self):
"""Pop a value off the queue."""
if self.is_empty():
print("ERROR:: Queue Underflow!")
return None
value = self.head.data
self.head = self.head.next
self.num_els -= 1
if self.num_els == 0:
self.tail = None
return value
    def peek(self):
        """See the front (oldest) value in the queue."""
if self.is_empty():
print("WARNING:: Queue Empty!")
return None
return self.head.data
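##################################################
# Usage sketch
##################################################
# A minimal smoke test exercising both implementations (illustrative only):
if __name__ == "__main__":
    for impl in (Queue, ListQueue):
        q = impl(3)
        for v in (1, 2, 3):
            q.push(v)
        assert q.is_full()
        assert q.peek() == 1                 # FIFO: oldest value first
        assert q.pop() == 1
        q.push(4)                            # array version wraps around here
        assert [q.pop() for _ in range(3)] == [2, 3, 4]
        assert q.is_empty()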
| 27.197452
| 58
| 0.479859
|
9184b423b8d4e2b3fefb3620464ef3096d327287
| 1,533
|
py
|
Python
|
packages/sklearn/_bak/pre/svm/nodes/svm___LinearSVCGetParams0/svm___LinearSVCGetParams0___METACODE.py
|
frecklebars/Ryven
|
86a8c06effc47897d0b8fbbd1fa8580a957f9515
|
[
"MIT"
] | 18
|
2021-01-18T09:52:41.000Z
|
2022-03-22T10:48:44.000Z
|
packages/sklearn/_bak/svm/nodes/svm___LinearSVCGetParams0/svm___LinearSVCGetParams0___METACODE.py
|
frecklebars/Ryven
|
86a8c06effc47897d0b8fbbd1fa8580a957f9515
|
[
"MIT"
] | null | null | null |
packages/sklearn/_bak/svm/nodes/svm___LinearSVCGetParams0/svm___LinearSVCGetParams0___METACODE.py
|
frecklebars/Ryven
|
86a8c06effc47897d0b8fbbd1fa8580a957f9515
|
[
"MIT"
] | 3
|
2021-01-18T09:49:42.000Z
|
2022-03-22T10:48:47.000Z
|
from NIENV import *
# API METHODS --------------
# self.main_widget
# self.update_shape()
# Ports
# self.input(index)
# self.set_output_val(index, val)
# self.exec_output(index)
# self.create_new_input(type_, label, widget_name=None, widget_pos='under', pos=-1)
# self.delete_input(index)
# self.create_new_output(type_, label, pos=-1)
# self.delete_output(index)
# Logging
# mylog = self.new_log('Example Log')
# mylog.log('I\'m alive!!')
# self.log_message('hello global!', target='global')
# self.log_message('that\'s not good', target='error')
# --------------------------
from sklearn.svm import LinearSVC
class %CLASS%(NodeInstance):
def __init__(self, params):
super(%CLASS%, self).__init__(params)
tmp = LinearSVC()
params = tmp.get_params()
for key in params:
self.create_new_output(type_="data", label=key, pos=-1)
del tmp
self.create_new_output(type_="data", label="param dict", pos=-1)
# self.special_actions['action name'] = {'method': M(self.action_method)}
# ...
def update_event(self, input_called=-1):
if input_called == 0:
model = self.input(1)
params = model.get_params()
i = 0
for param in params:
self.set_output_val(i, params[param])
i += 1
self.set_output_val(i, params)
def get_data(self):
data = {}
return data
def set_data(self, data):
pass
def removing(self):
pass
| 25.55
| 83
| 0.593607
|
c7dd45ec3f83ed5c34766e70823e0ad6b43c4d9d
| 260
|
py
|
Python
|
python/mathematics/palindrom.py
|
jtpio/algo-toolbox
|
d0f675889889ad52d853d948b0191bbd14c1e9cd
|
[
"MIT"
] | null | null | null |
python/mathematics/palindrom.py
|
jtpio/algo-toolbox
|
d0f675889889ad52d853d948b0191bbd14c1e9cd
|
[
"MIT"
] | null | null | null |
python/mathematics/palindrom.py
|
jtpio/algo-toolbox
|
d0f675889889ad52d853d948b0191bbd14c1e9cd
|
[
"MIT"
] | null | null | null |
def is_palindrom(c):
    """ Return True if the string representation of <c> is a palindrome """
c = str(c)
for i, e in enumerate(c):
if i > len(c)/2:
break
else:
if e != c[len(c)-i-1]:
return False
return True
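# Hand-checked examples (illustrative, not part of the original module):
#   is_palindrom("kayak") -> True
#   is_palindrom(12321)   -> True   (non-strings are stringified first)
#   is_palindrom("abca")  -> False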
| 23.636364
| 56
| 0.461538
|
c5631196824a44b488ff0040725cbfe50337ee4b
| 391
|
py
|
Python
|
pollers/pollers/wsgi.py
|
Hichem-Chabou/Pollers
|
9ece8b3680c674637cd28eaa95b5177941699a95
|
[
"MIT"
] | null | null | null |
pollers/pollers/wsgi.py
|
Hichem-Chabou/Pollers
|
9ece8b3680c674637cd28eaa95b5177941699a95
|
[
"MIT"
] | null | null | null |
pollers/pollers/wsgi.py
|
Hichem-Chabou/Pollers
|
9ece8b3680c674637cd28eaa95b5177941699a95
|
[
"MIT"
] | null | null | null |
"""
WSGI config for pollers project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pollers.settings')
application = get_wsgi_application()
| 23
| 78
| 0.785166
|
5977ea7857737af516cf73bd0ce9bc3ee968cf6d
| 1,206
|
py
|
Python
|
migration/migrator/migrations/course/20210527091235_remove_lock_date.py
|
jportorreal00/Submitty
|
c32256472e9cfaac7c13a3b07a558314eff447af
|
[
"BSD-3-Clause"
] | 411
|
2016-06-14T20:52:25.000Z
|
2022-03-31T21:20:25.000Z
|
migration/migrator/migrations/course/20210527091235_remove_lock_date.py
|
KaelanWillauer/Submitty
|
cf9b6ceda15ec0a661e2ca81ea7864790094c64a
|
[
"BSD-3-Clause"
] | 5,730
|
2016-05-23T21:04:32.000Z
|
2022-03-31T10:08:06.000Z
|
migration/migrator/migrations/course/20210527091235_remove_lock_date.py
|
KaelanWillauer/Submitty
|
cf9b6ceda15ec0a661e2ca81ea7864790094c64a
|
[
"BSD-3-Clause"
] | 423
|
2016-09-22T21:11:30.000Z
|
2022-03-29T18:55:28.000Z
|
"""Migration for a given Submitty course database."""
def up(config, database, semester, course):
"""
Run up migration.
:param config: Object holding configuration details about Submitty
:type config: migrator.config.Config
:param database: Object for interacting with given database for environment
:type database: migrator.db.Database
:param semester: Semester of the course being migrated
:type semester: str
:param course: Code of course being migrated
:type course: str
"""
database.execute("ALTER TABLE gradeable DROP COLUMN IF EXISTS g_grade_locked_date;")
def down(config, database, semester, course):
"""
Run down migration (rollback).
:param config: Object holding configuration details about Submitty
:type config: migrator.config.Config
:param database: Object for interacting with given database for environment
:type database: migrator.db.Database
:param semester: Semester of the course being migrated
:type semester: str
:param course: Code of course being migrated
:type course: str
"""
database.execute("ALTER TABLE gradeable ADD COLUMN IF NOT EXISTS g_grade_locked_date timestamptz;")
| 35.470588
| 103
| 0.730514
|
0c0401a2f9d37ebea7ef0dc79eeb96f2a8bccbc0
| 6,070
|
py
|
Python
|
nixpkgs_review/report.py
|
omasanori/nixpkgs-review
|
868eced5bc552c6f3756af9142263a1b6fe64d2c
|
[
"MIT"
] | 57
|
2018-03-17T17:32:37.000Z
|
2019-12-04T18:22:07.000Z
|
nixpkgs_review/report.py
|
omasanori/nixpkgs-review
|
868eced5bc552c6f3756af9142263a1b6fe64d2c
|
[
"MIT"
] | 33
|
2018-04-22T01:26:30.000Z
|
2019-12-05T15:51:28.000Z
|
nixpkgs_review/report.py
|
omasanori/nixpkgs-review
|
868eced5bc552c6f3756af9142263a1b6fe64d2c
|
[
"MIT"
] | 11
|
2018-05-28T10:35:19.000Z
|
2019-11-04T10:29:05.000Z
|
import os
import subprocess
import json
from pathlib import Path
from typing import Callable, List, Optional
from .nix import Attr
from .utils import info, link, warn
def print_number(
packages: List[Attr],
msg: str,
what: str = "package",
log: Callable[[str], None] = warn,
) -> None:
if len(packages) == 0:
return
plural = "s" if len(packages) > 1 else ""
names = (a.name for a in packages)
log(f"{len(packages)} {what}{plural} {msg}:")
log(" ".join(names))
log("")
def html_pkgs_section(packages: List[Attr], msg: str, what: str = "package") -> str:
if len(packages) == 0:
return ""
plural = "s" if len(packages) > 1 else ""
res = "<details>\n"
res += f" <summary>{len(packages)} {what}{plural} {msg}:</summary>\n <ul>\n"
for pkg in packages:
res += f" <li>{pkg.name}"
if len(pkg.aliases) > 0:
            res += f" ({', '.join(pkg.aliases)})"
res += "</li>\n"
res += " </ul>\n</details>\n"
return res
class LazyDirectory:
def __init__(self, path: Path) -> None:
self.path = path
self.created = False
def ensure(self) -> Path:
if not self.created:
self.path.mkdir(exist_ok=True)
self.created = True
return self.path
def write_error_logs(attrs: List[Attr], directory: Path) -> None:
logs = LazyDirectory(directory.joinpath("logs"))
results = LazyDirectory(directory.joinpath("results"))
failed_results = LazyDirectory(directory.joinpath("failed_results"))
for attr in attrs:
if attr.path is not None and os.path.exists(attr.path):
if attr.was_build():
symlink_source = results.ensure().joinpath(attr.name)
else:
symlink_source = failed_results.ensure().joinpath(attr.name)
if os.path.lexists(symlink_source):
symlink_source.unlink()
symlink_source.symlink_to(attr.path)
for path in [attr.drv_path, attr.path]:
if not path:
continue
with open(logs.ensure().joinpath(attr.name + ".log"), "w+") as f:
nix_log = subprocess.run(
[
"nix",
"--experimental-features",
"nix-command",
"log",
path,
],
stdout=f,
)
if nix_log.returncode == 0:
break
class Report:
def __init__(self, system: str, attrs: List[Attr]) -> None:
self.system = system
self.attrs = attrs
self.broken: List[Attr] = []
self.failed: List[Attr] = []
self.non_existant: List[Attr] = []
self.blacklisted: List[Attr] = []
self.tests: List[Attr] = []
self.built: List[Attr] = []
for a in attrs:
if a.broken:
self.broken.append(a)
elif a.blacklisted:
self.blacklisted.append(a)
elif not a.exists:
self.non_existant.append(a)
elif a.name.startswith("nixosTests."):
self.tests.append(a)
elif not a.was_build():
self.failed.append(a)
else:
self.built.append(a)
def built_packages(self) -> List[str]:
return [a.name for a in self.built]
def write(self, directory: Path, pr: Optional[int]) -> None:
with open(directory.joinpath("report.md"), "w+") as f:
f.write(self.markdown(pr))
with open(directory.joinpath("report.json"), "w+") as f:
f.write(self.json(pr))
write_error_logs(self.attrs, directory)
def succeeded(self) -> bool:
"""Whether the report is considered a success or a failure"""
return len(self.failed) == 0
def json(self, pr: Optional[int]) -> str:
def serialize_attrs(attrs: List[Attr]) -> List[str]:
return list(map(lambda a: a.name, attrs))
return json.dumps(
{
"system": self.system,
"pr": pr,
"broken": serialize_attrs(self.broken),
"non-existant": serialize_attrs(self.non_existant),
"blacklisted": serialize_attrs(self.blacklisted),
"failed": serialize_attrs(self.failed),
"built": serialize_attrs(self.built),
"tests": serialize_attrs(self.tests),
},
sort_keys=True,
indent=4,
)
def markdown(self, pr: Optional[int]) -> str:
cmd = "nixpkgs-review"
if pr is not None:
cmd += f" pr {pr}"
msg = f"Result of `{cmd}` run on {self.system} [1](https://github.com/Mic92/nixpkgs-review)\n"
msg += html_pkgs_section(self.broken, "marked as broken and skipped")
msg += html_pkgs_section(
self.non_existant,
"present in ofBorgs evaluation, but not found in the checkout",
)
msg += html_pkgs_section(self.blacklisted, "blacklisted")
msg += html_pkgs_section(self.failed, "failed to build")
msg += html_pkgs_section(self.tests, "built", what="test")
msg += html_pkgs_section(self.built, "built")
return msg
def print_console(self, pr: Optional[int]) -> None:
if pr is not None:
pr_url = f"https://github.com/NixOS/nixpkgs/pull/{pr}"
info("\nLink to currently reviewing PR:")
link(f"\u001b]8;;{pr_url}\u001b\\{pr_url}\u001b]8;;\u001b\\\n")
print_number(self.broken, "marked as broken and skipped")
print_number(
self.non_existant,
"present in ofBorgs evaluation, but not found in the checkout",
)
print_number(self.blacklisted, "blacklisted")
print_number(self.failed, "failed to build")
print_number(self.tests, "built", what="tests", log=print)
print_number(self.built, "built", log=print)
| 34.101124
| 102
| 0.547776
|
12a75abc60dcb8a5b09cc139bf9706ead05905ab
| 1,830
|
py
|
Python
|
setup.py
|
neogeo-technologies/idgo
|
23e028b0d7fb2daf54d7e2954e0cc4d7b9be4210
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
neogeo-technologies/idgo
|
23e028b0d7fb2daf54d7e2954e0cc4d7b9be4210
|
[
"Apache-2.0"
] | 2
|
2018-09-14T07:12:00.000Z
|
2019-11-13T09:32:24.000Z
|
setup.py
|
neogeo-technologies/idgo
|
23e028b0d7fb2daf54d7e2954e0cc4d7b9be4210
|
[
"Apache-2.0"
] | 2
|
2019-03-25T08:27:43.000Z
|
2019-10-07T15:25:30.000Z
|
# Copyright (c) 2017-2021 Neogeo-Technologies.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os.path
from setuptools import find_packages
from setuptools import setup
version = '1.5.5'
def parse_requirements(filename):
with open(filename) as f:
lines = (line.strip() for line in f)
return [line for line in lines if line and not line.startswith('#')]
dirname = os.path.dirname(__file__)
reqs_filename = os.path.join(dirname, 'requirements.txt')
reqs = [str(req) for req in parse_requirements(reqs_filename)]
setup(
name='idgo',
version=version,
description='IDGO',
author='Neogeo Technologies',
author_email='contact@neogeo.fr',
url='https://git.neogeo.fr/idgo/apps/idgo',
license='Apache License, Version 2.0',
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7'
],
packages=find_packages(where='.'),
install_requires=reqs,
)
| 31.551724
| 76
| 0.688525
|
86861d30214a43a95192d0f179be6ba705e002c8
| 1,737
|
py
|
Python
|
colossalai/nn/layer/colossalai_layer/normalization.py
|
RichardoLuo/ColossalAI
|
797a9dc5a9e801d7499b8667c3ef039a38aa15ba
|
[
"Apache-2.0"
] | 1,630
|
2021-10-30T01:00:27.000Z
|
2022-03-31T23:02:41.000Z
|
colossalai/nn/layer/colossalai_layer/normalization.py
|
RichardoLuo/ColossalAI
|
797a9dc5a9e801d7499b8667c3ef039a38aa15ba
|
[
"Apache-2.0"
] | 166
|
2021-10-30T01:03:01.000Z
|
2022-03-31T14:19:07.000Z
|
colossalai/nn/layer/colossalai_layer/normalization.py
|
RichardoLuo/ColossalAI
|
797a9dc5a9e801d7499b8667c3ef039a38aa15ba
|
[
"Apache-2.0"
] | 253
|
2021-10-30T06:10:29.000Z
|
2022-03-31T13:30:06.000Z
|
from colossalai.utils import get_current_device
from torch import nn
from ..parallel_1d import LayerNorm1D
from ..parallel_2d import LayerNorm2D
from ..parallel_2p5d import LayerNorm2p5D
from ..parallel_3d import LayerNorm3D
from ..utils import get_tensor_parallel_mode
from ..vanilla import VanillaLayerNorm
from ._utils import ColossalaiModule
_parallel_layernorm = {
None: VanillaLayerNorm,
"1d": LayerNorm1D,
"2d": LayerNorm2D,
"2.5d": LayerNorm2p5D,
"3d": LayerNorm3D,
}
class LayerNorm(ColossalaiModule):
r"""Layer Normalization for colossalai.
Args:
normalized_shape (int): input shape from an expected input of size.
:math:`[* \times \text{normalized_shape}[0] \times \text{normalized_shape}[1]
\times \ldots \times \text{normalized_shape}[-1]]`
If a single integer is used, it is treated as a singleton list, and this module will
normalize over the last dimension which is expected to be of that specific size.
eps (float): a value added to the denominator for numerical stability, defaults to 1e-05.
bias (bool, optional): Whether to add a bias, defaults to ``True``.
dtype (:class:`torch.dtype`, optional): The dtype of parameters, defaults to None.
"""
def __init__(self, normalized_shape: int, eps=1e-05, bias=True, dtype=None) -> None:
tensor_parallel = get_tensor_parallel_mode()
if tensor_parallel is None:
norm = nn.LayerNorm(normalized_shape, eps=eps).to(dtype).to(get_current_device())
else:
norm = _parallel_layernorm[tensor_parallel](normalized_shape, eps=eps, dtype=dtype)
super().__init__(norm)
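# Illustrative usage (assumes a distributed context has already been
# initialised elsewhere, e.g. via colossalai.launch, so that
# get_tensor_parallel_mode() returns the configured mode):
#   norm = LayerNorm(1024, eps=1e-05)
#   out = norm(x)  # x: (..., 1024); dispatches to the matching parallel variant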
| 41.357143
| 98
| 0.686241
|
3cbf1c52d1686baf3ec07901e69e4875f758b6be
| 15,880
|
py
|
Python
|
ivy/compiler/op_logging.py
|
ashok-arjun/ivy
|
274c03f667cda10e09b75e90e7a3a46b358e0358
|
[
"Apache-2.0"
] | 161
|
2021-01-20T22:11:13.000Z
|
2022-01-09T09:46:33.000Z
|
ivy/compiler/op_logging.py
|
ashok-arjun/ivy
|
274c03f667cda10e09b75e90e7a3a46b358e0358
|
[
"Apache-2.0"
] | 4
|
2021-11-10T17:04:36.000Z
|
2021-11-26T06:40:43.000Z
|
ivy/compiler/op_logging.py
|
ashok-arjun/ivy
|
274c03f667cda10e09b75e90e7a3a46b358e0358
|
[
"Apache-2.0"
] | 8
|
2021-02-17T20:56:33.000Z
|
2022-01-09T16:45:40.000Z
|
# global
import ivy
import time
import weakref
import inspect
import importlib
# local
from ivy.compiler import globals as glob
# noinspection PyProtectedMember
from ivy.compiler.helpers import _get_unique_id, _get_shape, _get_fn_signature, _clone_param, _delete_dependent_param,\
_args_n_kwarg_reprs_from_keys_n_args_n_kwargs, _output_reprs_from_output
# noinspection PyProtectedMember
from ivy.wrapper import _wrap_or_unwrap_methods, NON_WRAPPED_METHODS, ARRAYLESS_RET_METHODS
def _wrap_method_for_op_logging(fn, graph, limit_attributes=True, stateful_classes=None):
stateful_classes = tuple(ivy.default(stateful_classes, tuple()))
if (inspect.isclass(fn) or (hasattr(fn, '__name__') and
((fn.__name__[0] == '_' and fn.__name__ not in glob.ARRAY_BUILTINS) or
fn.__name__ in NON_WRAPPED_METHODS + ARRAYLESS_RET_METHODS)) or
(hasattr(fn, 'wrapped_for_compiling') and fn.wrapped_for_compiling)):
return fn
# noinspection PyUnresolvedReferences,PyProtectedMember
def _method_wrapped(*args, **kwargs):
# if cloning a param currently, return directly via the original function
if glob.wrapping_paused:
return fn(*args, **kwargs)
if glob.wrapped_stack:
# return if the wrapping is already happening on a higher level, and it's not a built-in which legitimately
# might need to be nested, unless it's a built-in recursion loop (ie for __getattribute__) in which case return
if (glob.wrapped_stack[-1].__name__[0:2] != '__' or
(glob.wrapped_stack[-1].__name__ == fn.__name__ and args == args and kwargs == kwargs)):
return fn(*args, **kwargs)
# return if the current method is a (possibly reversed) built-in operator, and the last entry of the wrapped
# stack is a version of that same operator
elif fn.__name__.replace('r', '').replace('_', '') in\
glob.wrapped_stack[-1].__name__.replace('r', '').replace('_', ''):
return fn(*args, **kwargs)
# attributes to ignore
if fn.__name__ in ['__getattr__', '__setattr__', '__getattribute__']:
att_name = args[1]
# return if the attribute being retrieved is another built-in method
if att_name[0:2] == '__':
return fn(*args, **kwargs)
# if the attribute is not recognized as one which can form part of the graph, then return
if limit_attributes and att_name not in glob.GRAPH_ATTRIBUTES[ivy.current_framework_str()]:
return fn(*args, **kwargs)
# otherwise, set wrapping as true
glob.wrapped_stack.append(fn)
# immutable tuple to mutable list
args = list(ivy.nested_map(args, lambda a: a, to_mutable=True))
kwargs = ivy.nested_map(kwargs, lambda v: v, to_mutable=True)
# get array idxs for positional args
# ToDo: work out why adding check_nests=True causes errors.
# This is needed in order to support stateful updates of ivy.Containers.
# arg_tracked_idxs = ivy.nested_indices_where(
# args, lambda x: ivy.is_array(x) or isinstance(x, stateful_classes), check_nests=True)
arg_tracked_idxs = ivy.nested_indices_where(
args, lambda x_: ivy.is_array(x_) or isinstance(x_, stateful_classes))
arg_vals = list(ivy.multi_index_nest(args, arg_tracked_idxs))
arg_param_ids = [_get_unique_id(x) for x in arg_vals]
for x in arg_vals:
glob.raw_pids_to_weakrefs[id(x)] = weakref.ref(x)
arg_param_types = [x.__class__ for x in arg_vals]
arg_param_var_flags = [ivy.is_variable(x, exclusive=True) for x in arg_vals]
arg_param_shapes = [_get_shape(x) for x in arg_vals]
# get array idxs for key-word args
# ToDo: work out why adding check_nests=True causes errors.
# This is needed in order to support stateful updates of ivy.Containers.
# kwarg_tracked_idxs = ivy.nested_indices_where(
# kwargs, lambda x: ivy.is_array(x) or isinstance(x, stateful_classes), check_nests=True)
kwarg_tracked_idxs = ivy.nested_indices_where(
kwargs, lambda x_: ivy.is_array(x_) or isinstance(x_, stateful_classes))
kwarg_vals = list(ivy.multi_index_nest(kwargs, kwarg_tracked_idxs))
kwarg_param_ids = [_get_unique_id(x) for x in kwarg_vals]
for x in kwarg_vals:
glob.raw_pids_to_weakrefs[id(x)] = weakref.ref(x)
kwarg_param_types = [x.__class__ for x in kwarg_vals]
kwarg_param_var_flags = [ivy.is_variable(x, exclusive=True) for x in kwarg_vals]
kwarg_param_shapes = [_get_shape(x) for x in kwarg_vals]
# set the backend function
backend_fn = fn
# compute the return
glob.wrapping_paused = True
ret_raw = fn(*args, **kwargs)
glob.wrapping_paused = False
# provide return value for __setattr__
if fn.__name__ == '__setattr__':
ret_raw = args[0]
# update the setattr method to return the object after attribute setting
def backend_fn(__obj, __name, __value):
setattr(__obj, __name, __value)
return __obj
# remove parameters from args and kwargs
ivy.map_nest_at_indices(args, arg_tracked_idxs, lambda x_: _delete_dependent_param(x_, graph))
ivy.map_nest_at_indices(kwargs, kwarg_tracked_idxs, lambda x_: _delete_dependent_param(x_, graph))
# covert return to list
ret_listified = False
if isinstance(ret_raw, tuple):
ret = list(ret_raw)
else:
ret = [ret_raw]
ret_listified = True
# get array idxs for return
# ToDo: work out why adding check_nests=True causes errors.
# This is needed in order to support stateful updates of ivy.Containers.
# output_tracked_idxs = ivy.nested_indices_where(
# ret, lambda x: ivy.is_array(x) or isinstance(x, stateful_classes), check_nests=True)
output_tracked_idxs = ivy.nested_indices_where(
ret, lambda x_: ivy.is_array(x_) or isinstance(x_, stateful_classes))
output_vals = list(ivy.multi_index_nest(ret, output_tracked_idxs))
output_param_ids = [_get_unique_id(x) for x in output_vals]
output_param_types = [x.__class__ for x in output_vals]
output_param_var_flags = [ivy.is_variable(x, exclusive=True) for x in output_vals]
output_param_shapes = [_get_shape(x) for x in output_vals]
# clone the param when getting an attribute, to preserve uniqueness in the graph
if fn.__name__ in ['__getattr__', '__getattribute__']:
# update the param_id for each param in the retreived attribute in the graph
ivy.map_nest_at_indices(ret, output_tracked_idxs, lambda x: _clone_param(x, graph))
# find all duplicate param ids from the input in the return
duplicates = list()
for i, ret_pid in enumerate(output_param_ids):
if ret_pid in arg_param_ids + kwarg_param_ids:
duplicates.append(i)
# clone all repeated return parameters to give unique parameter ids in the graph
duplicate_tracked_idxs = [output_tracked_idxs[i] for i in duplicates]
ivy.map_nest_at_indices(ret, duplicate_tracked_idxs, lambda x: _clone_param(x, graph))
# get return param ids after cloning
output_vals = list(ivy.multi_index_nest(ret, output_tracked_idxs))
output_param_ids = [_get_unique_id(x) for x in output_vals]
for x in output_vals:
glob.raw_pids_to_weakrefs[id(x)] = weakref.ref(x)
# maybe add to set of dependent_pids
if fn.__name__ in glob.GENERATOR_METHODS and graph.include_generators:
[glob.dependent_pids.add(pid) for pid in output_param_ids]
else:
for pid in arg_param_ids + kwarg_param_ids:
if pid in glob.dependent_pids:
[glob.dependent_pids.add(pid) for pid in output_param_ids]
break
# wrap the function
def new_fn(arg_array_vals, kwarg_array_vals):
# ToDo: make this as efficient as possible; this is performed at runtime
args_writeable = ivy.copy_nest(args)
kwargs_writeable = ivy.copy_nest(kwargs)
ivy.set_nest_at_indices(args_writeable, arg_tracked_idxs, arg_array_vals)
ivy.set_nest_at_indices(kwargs_writeable, kwarg_tracked_idxs, kwarg_array_vals)
return backend_fn(*args_writeable, **kwargs_writeable)
# wrap the function with timing
def new_fn_w_timing(arg_array_vals, kwarg_array_vals):
start = time.perf_counter()
args_writeable = ivy.copy_nest(args)
kwargs_writeable = ivy.copy_nest(kwargs)
graph.update_inference_times('2_0_arg_n_kwarg_copying', time.perf_counter() - start)
start = time.perf_counter()
ivy.set_nest_at_indices(args_writeable, arg_tracked_idxs, arg_array_vals)
ivy.set_nest_at_indices(kwargs_writeable, kwarg_tracked_idxs, kwarg_array_vals)
graph.update_inference_times('2_1_arg_n_kwarg_writing', time.perf_counter() - start)
start = time.perf_counter()
ret_ = backend_fn(*args_writeable, **kwargs_writeable)
graph.update_inference_times('2_2_backend_fn', time.perf_counter() - start)
return ret_
# add function attributes which inform about the arguments and returns
glob.wrapping_paused = True
if glob.time_inference:
new_fn = new_fn_w_timing
new_fn.arg_reprs = str(args)
new_fn.arg_tracked_idxs = arg_tracked_idxs
new_fn.arg_param_ids = arg_param_ids
new_fn.arg_param_types = arg_param_types
new_fn.arg_param_var_flags = arg_param_var_flags
new_fn.arg_param_shapes = arg_param_shapes
new_fn.kwarg_reprs = str(kwargs)
new_fn.kwarg_tracked_idxs = kwarg_tracked_idxs
new_fn.kwarg_param_ids = kwarg_param_ids
new_fn.kwarg_param_types = kwarg_param_types
new_fn.kwarg_param_var_flags = kwarg_param_var_flags
new_fn.kwarg_param_shapes = kwarg_param_shapes
try:
sig = inspect.signature(fn)
sig_keys = list(sig.parameters.keys())
except ValueError:
sig_keys = list()
new_fn.arg_n_kwarg_reprs = _args_n_kwarg_reprs_from_keys_n_args_n_kwargs(sig_keys, args, kwargs)
new_fn.output_tracked_idxs = output_tracked_idxs
new_fn.output_param_ids = output_param_ids
new_fn.output_param_types = output_param_types
new_fn.output_param_var_flags = output_param_var_flags
new_fn.output_param_shapes = output_param_shapes
new_fn.output_reprs = _output_reprs_from_output(ret)
new_fn.timestamp = time.perf_counter()
new_fn.signature = _get_fn_signature(backend_fn)
new_fn.terminal = True
new_fn.is_constant = len(arg_param_ids + kwarg_param_ids) == 0 and \
(not graph.include_generators or
fn.__name__ not in glob.GENERATOR_METHODS[ivy.current_framework_str()])
glob.wrapping_paused = False
fns_in = [graph._pid_to_functions_dict[pid]
for pid in arg_param_ids + kwarg_param_ids if pid in graph._pid_to_functions_dict]
for fn_in in fns_in:
fn_in.terminal = False
if new_fn not in fn_in.fns_out:
fn_in.fns_out.append(new_fn)
new_fn.fns_in = fns_in
new_fn.fns_out = list()
new_fn.__repr__ = lambda: new_fn.__name__
if hasattr(fn, '__name__'):
new_fn.__name__ = fn.__name__
# add to graph if compiling
if glob.op_logging:
# add this function to the graph for each output pid
for pid in output_param_ids:
if pid in graph._pid_to_functions_dict:
graph._register_output(ret)
glob.op_logging = False
_unwrap_methods_from_op_logging(list(graph._stateful_classes))
# noinspection PyBroadException
try:
graph.show(save_to_disk=True, output_connected_only=False)
except Exception:
pass
raise Exception(
'\n\ntried to add {} to graph._functions_dict, but function {} with the same output pid {} '
'already exists!'.format(
new_fn.__name__ + '(*{}, **{})'.format(new_fn.arg_reprs, new_fn.kwarg_reprs),
graph._pid_to_functions_dict[pid].__name__ + '(*{}, **{})'.format(
graph._pid_to_functions_dict[pid].arg_reprs,
graph._pid_to_functions_dict[pid].kwarg_reprs), pid))
graph.add_fn_to_dict(pid, new_fn)
# unset wrapping as true
glob.wrapped_stack.pop(-1)
# return the function output
return ret[0] if ret_listified else tuple(ret)
if hasattr(fn, '__name__'):
_method_wrapped.__name__ = fn.__name__
_method_wrapped.wrapped_for_compiling = True
_method_wrapped.inner_fn = fn
return _method_wrapped
def _unwrap_method_from_op_logging(method_wrapped):
if not hasattr(method_wrapped, 'wrapped_for_compiling') or not method_wrapped.wrapped_for_compiling:
return method_wrapped
return method_wrapped.inner_fn
def _wrap_methods_for_op_logging(graph, stateful_classes=None):
# wrap backend framework
classes_to_wrap = [getattr(importlib.import_module(ctw[0]), ctw[1])
for ctw in glob.CLASSES_TO_WRAP[ivy.current_framework_str()]]
_wrap_or_unwrap_methods(
lambda fn: _wrap_method_for_op_logging(fn, graph), classes_to_wrap=classes_to_wrap, native=True)
# wrap stateful classes
stateful_classes = ivy.default(stateful_classes, [])
for cls in stateful_classes:
assert hasattr(cls, '__setattr__') and (hasattr(cls, '__getattr__') or hasattr(cls, '__getattribute__'))
cls.__setattr__ = _wrap_method_for_op_logging(
cls.__setattr__, graph, limit_attributes=False, stateful_classes=stateful_classes)
if hasattr(cls, '__getattr__'):
cls.__getattr__ = _wrap_method_for_op_logging(
cls.__getattr__, graph, limit_attributes=False, stateful_classes=stateful_classes)
if hasattr(cls, '__getattribute__'):
cls.__getattribute__ = _wrap_method_for_op_logging(
cls.__getattribute__, graph, limit_attributes=False, stateful_classes=stateful_classes)
def _unwrap_methods_from_op_logging(stateful_classes=None):
# unwrap backend framework
classes_to_wrap = [getattr(importlib.import_module(ctw[0]), ctw[1])
for ctw in glob.CLASSES_TO_WRAP[ivy.current_framework_str()]] + stateful_classes
_wrap_or_unwrap_methods(
lambda fn: _unwrap_method_from_op_logging(fn), classes_to_wrap=classes_to_wrap, native=True)
# unwrap stateful classes
stateful_classes = ivy.default(stateful_classes, [])
for cls in stateful_classes:
assert hasattr(cls, '__setattr__') and (hasattr(cls, '__getattr__') or hasattr(cls, '__getattribute__'))
cls.__setattr__ = _unwrap_method_from_op_logging(cls.__setattr__)
if hasattr(cls, '__getattr__'):
cls.__getattr__ = _unwrap_method_from_op_logging(cls.__getattr__)
if hasattr(cls, '__getattribute__'):
cls.__getattribute__ = _unwrap_method_from_op_logging(cls.__getattribute__)
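
# Usage sketch (editor's addition, not part of the original module): the intended
# wrap -> log -> unwrap lifecycle around a traced call. `graph` is assumed to be
# an already-constructed Graph instance and `fn` the function whose backend ops
# should be recorded.
def _example_op_logging_lifecycle(graph, fn, *args):
    _wrap_methods_for_op_logging(graph)  # patch backend classes so calls are logged
    glob.op_logging = True
    try:
        ret = fn(*args)  # backend ops executed here are added to the graph
    finally:
        glob.op_logging = False
        _unwrap_methods_from_op_logging(list(graph._stateful_classes))
    return ret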
| 47.831325
| 123
| 0.666373
|
4d4e3b72de39d6424d156730caa005eebf947ffa
| 587,680
|
py
|
Python
|
juju/client/_client2.py
|
tai271828/python-libjuju
|
af10ed0a60a6ab2d93ad329e208832f49d164952
|
[
"Apache-2.0"
] | null | null | null |
juju/client/_client2.py
|
tai271828/python-libjuju
|
af10ed0a60a6ab2d93ad329e208832f49d164952
|
[
"Apache-2.0"
] | null | null | null |
juju/client/_client2.py
|
tai271828/python-libjuju
|
af10ed0a60a6ab2d93ad329e208832f49d164952
|
[
"Apache-2.0"
] | null | null | null |
# DO NOT CHANGE THIS FILE! This file is auto-generated by facade.py.
# Changes will be overwritten/lost when the file is regenerated.
from juju.client.facade import Type, ReturnMapping
from juju.client._definitions import *
class ActionFacade(Type):
name = 'Action'
version = 2
schema = {'definitions': {'Action': {'additionalProperties': False,
'properties': {'name': {'type': 'string'},
'parameters': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'receiver': {'type': 'string'},
'tag': {'type': 'string'}},
'required': ['tag', 'receiver', 'name'],
'type': 'object'},
'ActionResult': {'additionalProperties': False,
'properties': {'action': {'$ref': '#/definitions/Action'},
'completed': {'format': 'date-time',
'type': 'string'},
'enqueued': {'format': 'date-time',
'type': 'string'},
'error': {'$ref': '#/definitions/Error'},
'message': {'type': 'string'},
'output': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'started': {'format': 'date-time',
'type': 'string'},
'status': {'type': 'string'}},
'type': 'object'},
'ActionResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ActionResult'},
'type': 'array'}},
'type': 'object'},
'ActionSpec': {'additionalProperties': False,
'properties': {'description': {'type': 'string'},
'params': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'}},
'required': ['description', 'params'],
'type': 'object'},
'Actions': {'additionalProperties': False,
'properties': {'actions': {'items': {'$ref': '#/definitions/Action'},
'type': 'array'}},
'type': 'object'},
'ActionsByName': {'additionalProperties': False,
'properties': {'actions': {'items': {'$ref': '#/definitions/ActionResult'},
'type': 'array'},
'error': {'$ref': '#/definitions/Error'},
'name': {'type': 'string'}},
'type': 'object'},
'ActionsByNames': {'additionalProperties': False,
'properties': {'actions': {'items': {'$ref': '#/definitions/ActionsByName'},
'type': 'array'}},
'type': 'object'},
'ActionsByReceiver': {'additionalProperties': False,
'properties': {'actions': {'items': {'$ref': '#/definitions/ActionResult'},
'type': 'array'},
'error': {'$ref': '#/definitions/Error'},
'receiver': {'type': 'string'}},
'type': 'object'},
'ActionsByReceivers': {'additionalProperties': False,
'properties': {'actions': {'items': {'$ref': '#/definitions/ActionsByReceiver'},
'type': 'array'}},
'type': 'object'},
'ApplicationCharmActionsResult': {'additionalProperties': False,
'properties': {'actions': {'patternProperties': {'.*': {'$ref': '#/definitions/ActionSpec'}},
'type': 'object'},
'application-tag': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ApplicationsCharmActionsResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ApplicationCharmActionsResult'},
'type': 'array'}},
'type': 'object'},
'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'$ref': '#/definitions/ErrorInfo'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorInfo': {'additionalProperties': False,
'properties': {'macaroon': {'$ref': '#/definitions/Macaroon'},
'macaroon-path': {'type': 'string'}},
'type': 'object'},
'FindActionsByNames': {'additionalProperties': False,
'properties': {'names': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'},
'FindTags': {'additionalProperties': False,
'properties': {'prefixes': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['prefixes'],
'type': 'object'},
'FindTagsResults': {'additionalProperties': False,
'properties': {'matches': {'patternProperties': {'.*': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'type': 'object'}},
'required': ['matches'],
'type': 'object'},
'Macaroon': {'additionalProperties': False, 'type': 'object'},
'RunParams': {'additionalProperties': False,
'properties': {'applications': {'items': {'type': 'string'},
'type': 'array'},
'commands': {'type': 'string'},
'machines': {'items': {'type': 'string'},
'type': 'array'},
'timeout': {'type': 'integer'},
'units': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['commands', 'timeout'],
'type': 'object'}},
'properties': {'Actions': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ActionResults'}},
'type': 'object'},
'ApplicationsCharmsActions': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ApplicationsCharmActionsResults'}},
'type': 'object'},
'Cancel': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ActionResults'}},
'type': 'object'},
'Enqueue': {'properties': {'Params': {'$ref': '#/definitions/Actions'},
'Result': {'$ref': '#/definitions/ActionResults'}},
'type': 'object'},
'FindActionTagsByPrefix': {'properties': {'Params': {'$ref': '#/definitions/FindTags'},
'Result': {'$ref': '#/definitions/FindTagsResults'}},
'type': 'object'},
'FindActionsByNames': {'properties': {'Params': {'$ref': '#/definitions/FindActionsByNames'},
'Result': {'$ref': '#/definitions/ActionsByNames'}},
'type': 'object'},
'ListAll': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ActionsByReceivers'}},
'type': 'object'},
'ListCompleted': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ActionsByReceivers'}},
'type': 'object'},
'ListPending': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ActionsByReceivers'}},
'type': 'object'},
'ListRunning': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ActionsByReceivers'}},
'type': 'object'},
'Run': {'properties': {'Params': {'$ref': '#/definitions/RunParams'},
'Result': {'$ref': '#/definitions/ActionResults'}},
'type': 'object'},
'RunOnAllMachines': {'properties': {'Params': {'$ref': '#/definitions/RunParams'},
'Result': {'$ref': '#/definitions/ActionResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ActionResults)
async def Actions(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ActionResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Action',
request='Actions',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ApplicationsCharmActionsResults)
async def ApplicationsCharmsActions(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ApplicationCharmActionsResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Action',
request='ApplicationsCharmsActions',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ActionResults)
async def Cancel(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ActionResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Action',
request='Cancel',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ActionResults)
async def Enqueue(self, actions=None):
'''
actions : typing.Sequence[~Action]
Returns -> typing.Sequence[~ActionResult]
'''
if actions is not None and not isinstance(actions, (bytes, str, list)):
raise Exception("Expected actions to be a Sequence, received: {}".format(type(actions)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Action',
request='Enqueue',
version=2,
params=_params)
_params['actions'] = actions
reply = await self.rpc(msg)
return reply
@ReturnMapping(FindTagsResults)
async def FindActionTagsByPrefix(self, prefixes=None):
'''
prefixes : typing.Sequence[str]
Returns -> typing.Sequence[~Entity]
'''
if prefixes is not None and not isinstance(prefixes, (bytes, str, list)):
raise Exception("Expected prefixes to be a Sequence, received: {}".format(type(prefixes)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Action',
request='FindActionTagsByPrefix',
version=2,
params=_params)
_params['prefixes'] = prefixes
reply = await self.rpc(msg)
return reply
@ReturnMapping(ActionsByNames)
async def FindActionsByNames(self, names=None):
'''
names : typing.Sequence[str]
Returns -> typing.Sequence[~ActionsByName]
'''
if names is not None and not isinstance(names, (bytes, str, list)):
raise Exception("Expected names to be a Sequence, received: {}".format(type(names)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Action',
request='FindActionsByNames',
version=2,
params=_params)
_params['names'] = names
reply = await self.rpc(msg)
return reply
@ReturnMapping(ActionsByReceivers)
async def ListAll(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ActionsByReceiver]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Action',
request='ListAll',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ActionsByReceivers)
async def ListCompleted(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ActionsByReceiver]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Action',
request='ListCompleted',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ActionsByReceivers)
async def ListPending(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ActionsByReceiver]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Action',
request='ListPending',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ActionsByReceivers)
async def ListRunning(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ActionsByReceiver]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Action',
request='ListRunning',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ActionResults)
async def Run(self, applications=None, commands=None, machines=None, timeout=None, units=None):
'''
applications : typing.Sequence[str]
commands : str
machines : typing.Sequence[str]
timeout : int
units : typing.Sequence[str]
Returns -> typing.Sequence[~ActionResult]
'''
if applications is not None and not isinstance(applications, (bytes, str, list)):
raise Exception("Expected applications to be a Sequence, received: {}".format(type(applications)))
if commands is not None and not isinstance(commands, (bytes, str)):
raise Exception("Expected commands to be a str, received: {}".format(type(commands)))
if machines is not None and not isinstance(machines, (bytes, str, list)):
raise Exception("Expected machines to be a Sequence, received: {}".format(type(machines)))
if timeout is not None and not isinstance(timeout, int):
raise Exception("Expected timeout to be a int, received: {}".format(type(timeout)))
if units is not None and not isinstance(units, (bytes, str, list)):
raise Exception("Expected units to be a Sequence, received: {}".format(type(units)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Action',
request='Run',
version=2,
params=_params)
_params['applications'] = applications
_params['commands'] = commands
_params['machines'] = machines
_params['timeout'] = timeout
_params['units'] = units
reply = await self.rpc(msg)
return reply
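    # Illustrative call (editor's addition, values are placeholders): run a shell
    # command on two units. The timeout is an integer duration; the Juju API is
    # assumed to express it in nanoseconds, so 30 * 10**9 below means 30 seconds.
    #     await action_facade.Run(commands='uname -a',
    #                             timeout=30 * 10**9,
    #                             units=['mysql/0', 'mysql/1'])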
@ReturnMapping(ActionResults)
async def RunOnAllMachines(self, applications=None, commands=None, machines=None, timeout=None, units=None):
'''
applications : typing.Sequence[str]
commands : str
machines : typing.Sequence[str]
timeout : int
units : typing.Sequence[str]
Returns -> typing.Sequence[~ActionResult]
'''
if applications is not None and not isinstance(applications, (bytes, str, list)):
raise Exception("Expected applications to be a Sequence, received: {}".format(type(applications)))
if commands is not None and not isinstance(commands, (bytes, str)):
raise Exception("Expected commands to be a str, received: {}".format(type(commands)))
if machines is not None and not isinstance(machines, (bytes, str, list)):
raise Exception("Expected machines to be a Sequence, received: {}".format(type(machines)))
if timeout is not None and not isinstance(timeout, int):
raise Exception("Expected timeout to be a int, received: {}".format(type(timeout)))
if units is not None and not isinstance(units, (bytes, str, list)):
raise Exception("Expected units to be a Sequence, received: {}".format(type(units)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Action',
request='RunOnAllMachines',
version=2,
params=_params)
_params['applications'] = applications
_params['commands'] = commands
_params['machines'] = machines
_params['timeout'] = timeout
_params['units'] = units
reply = await self.rpc(msg)
return reply
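
# Usage sketch (editor's addition, not emitted by facade.py): enqueue an action
# and fetch its result by tag. `connection` is assumed to be an established
# juju.client.connection.Connection, and Type.from_connection the usual binding
# helper; names and tags below are placeholders.
async def _example_action_facade(connection):
    facade = ActionFacade.from_connection(connection)
    # Enqueue a named action against a unit receiver.
    enqueued = await facade.Enqueue(actions=[
        Action(name='backup', receiver='unit-mysql-0', tag='', parameters={})])
    # Look up the enqueued action's status via the tag the controller assigned.
    tag = enqueued.results[0].action.tag
    return await facade.Actions(entities=[Entity(tag=tag)])
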
class AgentFacade(Type):
name = 'Agent'
version = 2
schema = {'definitions': {'AgentGetEntitiesResult': {'additionalProperties': False,
'properties': {'container-type': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'},
'jobs': {'items': {'type': 'string'},
'type': 'array'},
'life': {'type': 'string'}},
'required': ['life',
'jobs',
'container-type'],
'type': 'object'},
'AgentGetEntitiesResults': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/AgentGetEntitiesResult'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'CloudCredential': {'additionalProperties': False,
'properties': {'attrs': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'auth-type': {'type': 'string'},
'redacted': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['auth-type'],
'type': 'object'},
'CloudSpec': {'additionalProperties': False,
'properties': {'cacertificates': {'items': {'type': 'string'},
'type': 'array'},
'credential': {'$ref': '#/definitions/CloudCredential'},
'endpoint': {'type': 'string'},
'identity-endpoint': {'type': 'string'},
'name': {'type': 'string'},
'region': {'type': 'string'},
'storage-endpoint': {'type': 'string'},
'type': {'type': 'string'}},
'required': ['type', 'name'],
'type': 'object'},
'CloudSpecResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'$ref': '#/definitions/CloudSpec'}},
'type': 'object'},
'CloudSpecResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/CloudSpecResult'},
'type': 'array'}},
'type': 'object'},
'ControllerAPIInfoResult': {'additionalProperties': False,
'properties': {'addresses': {'items': {'type': 'string'},
'type': 'array'},
'cacert': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'required': ['addresses',
'cacert'],
'type': 'object'},
'ControllerAPIInfoResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ControllerAPIInfoResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ControllerConfigResult': {'additionalProperties': False,
'properties': {'config': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'}},
'required': ['config'],
'type': 'object'},
'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'EntityPassword': {'additionalProperties': False,
'properties': {'password': {'type': 'string'},
'tag': {'type': 'string'}},
'required': ['tag', 'password'],
'type': 'object'},
'EntityPasswords': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/EntityPassword'},
'type': 'array'}},
'required': ['changes'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'IsMasterResult': {'additionalProperties': False,
'properties': {'master': {'type': 'boolean'}},
'required': ['master'],
'type': 'object'},
'ModelConfigResult': {'additionalProperties': False,
'properties': {'config': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'}},
'required': ['config'],
'type': 'object'},
'ModelTag': {'additionalProperties': False, 'type': 'object'},
'NotifyWatchResult': {'additionalProperties': False,
'properties': {'NotifyWatcherId': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'required': ['NotifyWatcherId'],
'type': 'object'},
'NotifyWatchResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/NotifyWatchResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'StateServingInfo': {'additionalProperties': False,
'properties': {'api-port': {'type': 'integer'},
'ca-private-key': {'type': 'string'},
'cert': {'type': 'string'},
'controller-api-port': {'type': 'integer'},
'private-key': {'type': 'string'},
'shared-secret': {'type': 'string'},
'state-port': {'type': 'integer'},
'system-identity': {'type': 'string'}},
'required': ['api-port',
'state-port',
'cert',
'private-key',
'ca-private-key',
'shared-secret',
'system-identity'],
'type': 'object'}},
'properties': {'ClearReboot': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'CloudSpec': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/CloudSpecResults'}},
'type': 'object'},
'ControllerAPIInfoForModels': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ControllerAPIInfoResults'}},
'type': 'object'},
'ControllerConfig': {'properties': {'Result': {'$ref': '#/definitions/ControllerConfigResult'}},
'type': 'object'},
'GetCloudSpec': {'properties': {'Params': {'$ref': '#/definitions/ModelTag'},
'Result': {'$ref': '#/definitions/CloudSpecResult'}},
'type': 'object'},
'GetEntities': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/AgentGetEntitiesResults'}},
'type': 'object'},
'IsMaster': {'properties': {'Result': {'$ref': '#/definitions/IsMasterResult'}},
'type': 'object'},
'ModelConfig': {'properties': {'Result': {'$ref': '#/definitions/ModelConfigResult'}},
'type': 'object'},
'SetPasswords': {'properties': {'Params': {'$ref': '#/definitions/EntityPasswords'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'StateServingInfo': {'properties': {'Result': {'$ref': '#/definitions/StateServingInfo'}},
'type': 'object'},
'WatchCloudSpecsChanges': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/NotifyWatchResults'}},
'type': 'object'},
'WatchCredentials': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/NotifyWatchResults'}},
'type': 'object'},
'WatchForModelConfigChanges': {'properties': {'Result': {'$ref': '#/definitions/NotifyWatchResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResults)
async def ClearReboot(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Agent',
request='ClearReboot',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(CloudSpecResults)
async def CloudSpec(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~CloudSpecResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Agent',
request='CloudSpec',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ControllerAPIInfoResults)
async def ControllerAPIInfoForModels(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ControllerAPIInfoResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Agent',
request='ControllerAPIInfoForModels',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ControllerConfigResult)
async def ControllerConfig(self):
'''
Returns -> typing.Mapping[str, typing.Any]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Agent',
request='ControllerConfig',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(CloudSpecResult)
async def GetCloudSpec(self):
'''
Returns -> typing.Union[_ForwardRef('Error'), _ForwardRef('CloudSpec')]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Agent',
request='GetCloudSpec',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(AgentGetEntitiesResults)
async def GetEntities(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~AgentGetEntitiesResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Agent',
request='GetEntities',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(IsMasterResult)
async def IsMaster(self):
'''
Returns -> bool
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Agent',
request='IsMaster',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(ModelConfigResult)
async def ModelConfig(self):
'''
Returns -> typing.Mapping[str, typing.Any]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Agent',
request='ModelConfig',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def SetPasswords(self, changes=None):
'''
changes : typing.Sequence[~EntityPassword]
Returns -> typing.Sequence[~ErrorResult]
'''
if changes is not None and not isinstance(changes, (bytes, str, list)):
raise Exception("Expected changes to be a Sequence, received: {}".format(type(changes)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Agent',
request='SetPasswords',
version=2,
params=_params)
_params['changes'] = changes
reply = await self.rpc(msg)
return reply
@ReturnMapping(StateServingInfo)
async def StateServingInfo(self):
'''
Returns -> typing.Union[int, str]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Agent',
request='StateServingInfo',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResults)
async def WatchCloudSpecsChanges(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~NotifyWatchResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Agent',
request='WatchCloudSpecsChanges',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResults)
async def WatchCredentials(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~NotifyWatchResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Agent',
request='WatchCredentials',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResult)
async def WatchForModelConfigChanges(self):
'''
Returns -> typing.Union[str, _ForwardRef('Error')]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Agent',
request='WatchForModelConfigChanges',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
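
# Usage sketch (editor's addition): read a machine agent's details and rotate its
# password. `connection` and the machine tag are placeholders.
async def _example_agent_facade(connection):
    facade = AgentFacade.from_connection(connection)
    # Fetch life/jobs/container-type details for the machine agent.
    details = await facade.GetEntities(entities=[Entity(tag='machine-0')])
    # Set a new password for the same entity.
    await facade.SetPasswords(changes=[
        EntityPassword(tag='machine-0', password='a-new-password')])
    return details
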
class AllModelWatcherFacade(Type):
name = 'AllModelWatcher'
version = 2
schema = {'definitions': {'AllWatcherNextResults': {'additionalProperties': False,
'properties': {'deltas': {'items': {'$ref': '#/definitions/Delta'},
'type': 'array'}},
'required': ['deltas'],
'type': 'object'},
'Delta': {'additionalProperties': False,
'properties': {'entity': {'additionalProperties': True,
'type': 'object'},
'removed': {'type': 'boolean'}},
'required': ['removed', 'entity'],
'type': 'object'}},
'properties': {'Next': {'properties': {'Result': {'$ref': '#/definitions/AllWatcherNextResults'}},
'type': 'object'},
'Stop': {'type': 'object'}},
'type': 'object'}
@ReturnMapping(AllWatcherNextResults)
async def Next(self):
'''
Returns -> typing.Sequence[~Delta]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='AllModelWatcher',
request='Next',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Stop(self):
'''
Returns -> None
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='AllModelWatcher',
request='Stop',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
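
# Usage sketch (editor's addition): drain deltas from an all-model watcher until
# cancelled. Watcher-id plumbing is elided here; python-libjuju is assumed to
# route it through the connection when the facade is bound this way.
async def _example_all_model_watcher(connection):
    facade = AllModelWatcherFacade.from_connection(connection)
    try:
        while True:
            result = await facade.Next()  # blocks until the next batch of deltas
            for delta in result.deltas:
                print(delta.removed, delta.entity)
    finally:
        await facade.Stop()  # always release the server-side watcher
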
class AnnotationsFacade(Type):
name = 'Annotations'
version = 2
schema = {'definitions': {'AnnotationsGetResult': {'additionalProperties': False,
'properties': {'annotations': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'entity': {'type': 'string'},
'error': {'$ref': '#/definitions/ErrorResult'}},
'required': ['entity', 'annotations'],
'type': 'object'},
'AnnotationsGetResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/AnnotationsGetResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'AnnotationsSet': {'additionalProperties': False,
'properties': {'annotations': {'items': {'$ref': '#/definitions/EntityAnnotations'},
'type': 'array'}},
'required': ['annotations'],
'type': 'object'},
'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'EntityAnnotations': {'additionalProperties': False,
'properties': {'annotations': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'entity': {'type': 'string'}},
'required': ['entity', 'annotations'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'}},
'properties': {'Get': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/AnnotationsGetResults'}},
'type': 'object'},
'Set': {'properties': {'Params': {'$ref': '#/definitions/AnnotationsSet'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(AnnotationsGetResults)
async def Get(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~AnnotationsGetResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Annotations',
request='Get',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def Set(self, annotations=None):
'''
annotations : typing.Sequence[~EntityAnnotations]
Returns -> typing.Sequence[~ErrorResult]
'''
if annotations is not None and not isinstance(annotations, (bytes, str, list)):
raise Exception("Expected annotations to be a Sequence, received: {}".format(type(annotations)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Annotations',
request='Set',
version=2,
params=_params)
_params['annotations'] = annotations
reply = await self.rpc(msg)
return reply
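
# Usage sketch (editor's addition): set an annotation on an application and read
# it straight back. Tags and keys are placeholders.
async def _example_annotations_facade(connection):
    facade = AnnotationsFacade.from_connection(connection)
    await facade.Set(annotations=[
        EntityAnnotations(entity='application-mysql',
                          annotations={'owner': 'ops-team'})])
    return await facade.Get(entities=[Entity(tag='application-mysql')])
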
class ApplicationFacade(Type):
name = 'Application'
version = 2
schema = {'definitions': {'AddApplicationUnits': {'additionalProperties': False,
'properties': {'application': {'type': 'string'},
'num-units': {'type': 'integer'},
'placement': {'items': {'$ref': '#/definitions/Placement'},
'type': 'array'}},
'required': ['application',
'num-units',
'placement'],
'type': 'object'},
'AddApplicationUnitsResults': {'additionalProperties': False,
'properties': {'units': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['units'],
'type': 'object'},
'AddRelation': {'additionalProperties': False,
'properties': {'endpoints': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['endpoints'],
'type': 'object'},
'AddRelationResults': {'additionalProperties': False,
'properties': {'endpoints': {'patternProperties': {'.*': {'$ref': '#/definitions/CharmRelation'}},
'type': 'object'}},
'required': ['endpoints'],
'type': 'object'},
'ApplicationCharmRelations': {'additionalProperties': False,
'properties': {'application': {'type': 'string'}},
'required': ['application'],
'type': 'object'},
'ApplicationCharmRelationsResults': {'additionalProperties': False,
'properties': {'charm-relations': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['charm-relations'],
'type': 'object'},
'ApplicationDeploy': {'additionalProperties': False,
'properties': {'application': {'type': 'string'},
'channel': {'type': 'string'},
'charm-url': {'type': 'string'},
'config': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'config-yaml': {'type': 'string'},
'constraints': {'$ref': '#/definitions/Value'},
'endpoint-bindings': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'num-units': {'type': 'integer'},
'placement': {'items': {'$ref': '#/definitions/Placement'},
'type': 'array'},
'resources': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'series': {'type': 'string'},
'storage': {'patternProperties': {'.*': {'$ref': '#/definitions/Constraints'}},
'type': 'object'}},
'required': ['application',
'series',
'charm-url',
'channel',
'num-units',
'config-yaml',
'constraints'],
'type': 'object'},
'ApplicationDestroy': {'additionalProperties': False,
'properties': {'application': {'type': 'string'}},
'required': ['application'],
'type': 'object'},
'ApplicationExpose': {'additionalProperties': False,
'properties': {'application': {'type': 'string'}},
'required': ['application'],
'type': 'object'},
'ApplicationGet': {'additionalProperties': False,
'properties': {'application': {'type': 'string'}},
'required': ['application'],
'type': 'object'},
'ApplicationGetResults': {'additionalProperties': False,
'properties': {'application': {'type': 'string'},
'charm': {'type': 'string'},
'config': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'constraints': {'$ref': '#/definitions/Value'},
'series': {'type': 'string'}},
'required': ['application',
'charm',
'config',
'constraints',
'series'],
'type': 'object'},
'ApplicationMetricCredential': {'additionalProperties': False,
'properties': {'application': {'type': 'string'},
'metrics-credentials': {'items': {'type': 'integer'},
'type': 'array'}},
'required': ['application',
'metrics-credentials'],
'type': 'object'},
'ApplicationMetricCredentials': {'additionalProperties': False,
'properties': {'creds': {'items': {'$ref': '#/definitions/ApplicationMetricCredential'},
'type': 'array'}},
'required': ['creds'],
'type': 'object'},
'ApplicationSet': {'additionalProperties': False,
'properties': {'application': {'type': 'string'},
'options': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'}},
'required': ['application', 'options'],
'type': 'object'},
'ApplicationSetCharm': {'additionalProperties': False,
'properties': {'application': {'type': 'string'},
'channel': {'type': 'string'},
'charm-url': {'type': 'string'},
'config-settings': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'config-settings-yaml': {'type': 'string'},
'force-series': {'type': 'boolean'},
'force-units': {'type': 'boolean'},
'resource-ids': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'storage-constraints': {'patternProperties': {'.*': {'$ref': '#/definitions/StorageConstraints'}},
'type': 'object'}},
'required': ['application',
'charm-url',
'channel',
'force-units',
'force-series'],
'type': 'object'},
'ApplicationUnexpose': {'additionalProperties': False,
'properties': {'application': {'type': 'string'}},
'required': ['application'],
'type': 'object'},
'ApplicationUnset': {'additionalProperties': False,
'properties': {'application': {'type': 'string'},
'options': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['application', 'options'],
'type': 'object'},
'ApplicationUpdate': {'additionalProperties': False,
'properties': {'application': {'type': 'string'},
'charm-url': {'type': 'string'},
'constraints': {'$ref': '#/definitions/Value'},
'force-charm-url': {'type': 'boolean'},
'force-series': {'type': 'boolean'},
'min-units': {'type': 'integer'},
'settings': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'settings-yaml': {'type': 'string'}},
'required': ['application',
'charm-url',
'force-charm-url',
'force-series',
'settings-yaml'],
'type': 'object'},
'ApplicationsDeploy': {'additionalProperties': False,
'properties': {'applications': {'items': {'$ref': '#/definitions/ApplicationDeploy'},
'type': 'array'}},
'required': ['applications'],
'type': 'object'},
'CharmRelation': {'additionalProperties': False,
'properties': {'interface': {'type': 'string'},
'limit': {'type': 'integer'},
'name': {'type': 'string'},
'optional': {'type': 'boolean'},
'role': {'type': 'string'},
'scope': {'type': 'string'}},
'required': ['name',
'role',
'interface',
'optional',
'limit',
'scope'],
'type': 'object'},
'Constraints': {'additionalProperties': False,
'properties': {'Count': {'type': 'integer'},
'Pool': {'type': 'string'},
'Size': {'type': 'integer'}},
'required': ['Pool', 'Size', 'Count'],
'type': 'object'},
'DestroyApplicationUnits': {'additionalProperties': False,
'properties': {'unit-names': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['unit-names'],
'type': 'object'},
'DestroyRelation': {'additionalProperties': False,
'properties': {'endpoints': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['endpoints'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'$ref': '#/definitions/ErrorInfo'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorInfo': {'additionalProperties': False,
'properties': {'macaroon': {'$ref': '#/definitions/Macaroon'},
'macaroon-path': {'type': 'string'}},
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'GetApplicationConstraints': {'additionalProperties': False,
'properties': {'application': {'type': 'string'}},
'required': ['application'],
'type': 'object'},
'GetConstraintsResults': {'additionalProperties': False,
'properties': {'constraints': {'$ref': '#/definitions/Value'}},
'required': ['constraints'],
'type': 'object'},
'Macaroon': {'additionalProperties': False, 'type': 'object'},
'Placement': {'additionalProperties': False,
'properties': {'directive': {'type': 'string'},
'scope': {'type': 'string'}},
'required': ['scope', 'directive'],
'type': 'object'},
'SetConstraints': {'additionalProperties': False,
'properties': {'application': {'type': 'string'},
'constraints': {'$ref': '#/definitions/Value'}},
'required': ['application', 'constraints'],
'type': 'object'},
'StorageConstraints': {'additionalProperties': False,
'properties': {'count': {'type': 'integer'},
'pool': {'type': 'string'},
'size': {'type': 'integer'}},
'type': 'object'},
'StringResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'type': 'string'}},
'required': ['result'],
'type': 'object'},
'Value': {'additionalProperties': False,
'properties': {'arch': {'type': 'string'},
'container': {'type': 'string'},
'cores': {'type': 'integer'},
'cpu-power': {'type': 'integer'},
'instance-type': {'type': 'string'},
'mem': {'type': 'integer'},
'root-disk': {'type': 'integer'},
'spaces': {'items': {'type': 'string'},
'type': 'array'},
'tags': {'items': {'type': 'string'},
'type': 'array'},
'virt-type': {'type': 'string'}},
'type': 'object'}},
'properties': {'AddRelation': {'properties': {'Params': {'$ref': '#/definitions/AddRelation'},
'Result': {'$ref': '#/definitions/AddRelationResults'}},
'type': 'object'},
'AddUnits': {'properties': {'Params': {'$ref': '#/definitions/AddApplicationUnits'},
'Result': {'$ref': '#/definitions/AddApplicationUnitsResults'}},
'type': 'object'},
'CharmRelations': {'properties': {'Params': {'$ref': '#/definitions/ApplicationCharmRelations'},
'Result': {'$ref': '#/definitions/ApplicationCharmRelationsResults'}},
'type': 'object'},
'Deploy': {'properties': {'Params': {'$ref': '#/definitions/ApplicationsDeploy'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'Destroy': {'properties': {'Params': {'$ref': '#/definitions/ApplicationDestroy'}},
'type': 'object'},
'DestroyRelation': {'properties': {'Params': {'$ref': '#/definitions/DestroyRelation'}},
'type': 'object'},
'DestroyUnits': {'properties': {'Params': {'$ref': '#/definitions/DestroyApplicationUnits'}},
'type': 'object'},
'Expose': {'properties': {'Params': {'$ref': '#/definitions/ApplicationExpose'}},
'type': 'object'},
'Get': {'properties': {'Params': {'$ref': '#/definitions/ApplicationGet'},
'Result': {'$ref': '#/definitions/ApplicationGetResults'}},
'type': 'object'},
'GetCharmURL': {'properties': {'Params': {'$ref': '#/definitions/ApplicationGet'},
'Result': {'$ref': '#/definitions/StringResult'}},
'type': 'object'},
'GetConstraints': {'properties': {'Params': {'$ref': '#/definitions/GetApplicationConstraints'},
'Result': {'$ref': '#/definitions/GetConstraintsResults'}},
'type': 'object'},
'Set': {'properties': {'Params': {'$ref': '#/definitions/ApplicationSet'}},
'type': 'object'},
'SetCharm': {'properties': {'Params': {'$ref': '#/definitions/ApplicationSetCharm'}},
'type': 'object'},
'SetConstraints': {'properties': {'Params': {'$ref': '#/definitions/SetConstraints'}},
'type': 'object'},
'SetMetricCredentials': {'properties': {'Params': {'$ref': '#/definitions/ApplicationMetricCredentials'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'Unexpose': {'properties': {'Params': {'$ref': '#/definitions/ApplicationUnexpose'}},
'type': 'object'},
'Unset': {'properties': {'Params': {'$ref': '#/definitions/ApplicationUnset'}},
'type': 'object'},
'Update': {'properties': {'Params': {'$ref': '#/definitions/ApplicationUpdate'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(AddRelationResults)
async def AddRelation(self, endpoints=None):
'''
endpoints : typing.Sequence[str]
Returns -> typing.Mapping[str, ~CharmRelation]
'''
if endpoints is not None and not isinstance(endpoints, (bytes, str, list)):
raise Exception("Expected endpoints to be a Sequence, received: {}".format(type(endpoints)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='AddRelation',
version=2,
params=_params)
_params['endpoints'] = endpoints
reply = await self.rpc(msg)
return reply
@ReturnMapping(AddApplicationUnitsResults)
async def AddUnits(self, application=None, num_units=None, placement=None):
'''
application : str
num_units : int
placement : typing.Sequence[~Placement]
Returns -> typing.Sequence[str]
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
if num_units is not None and not isinstance(num_units, int):
raise Exception("Expected num_units to be a int, received: {}".format(type(num_units)))
if placement is not None and not isinstance(placement, (bytes, str, list)):
raise Exception("Expected placement to be a Sequence, received: {}".format(type(placement)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='AddUnits',
version=2,
params=_params)
_params['application'] = application
_params['num-units'] = num_units
_params['placement'] = placement
reply = await self.rpc(msg)
return reply
@ReturnMapping(ApplicationCharmRelationsResults)
async def CharmRelations(self, application=None):
'''
application : str
Returns -> typing.Sequence[str]
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='CharmRelations',
version=2,
params=_params)
_params['application'] = application
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def Deploy(self, applications=None):
'''
applications : typing.Sequence[~ApplicationDeploy]
Returns -> typing.Sequence[~ErrorResult]
'''
if applications is not None and not isinstance(applications, (bytes, str, list)):
raise Exception("Expected applications to be a Sequence, received: {}".format(type(applications)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='Deploy',
version=2,
params=_params)
_params['applications'] = applications
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Destroy(self, application=None):
'''
application : str
Returns -> None
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='Destroy',
version=2,
params=_params)
_params['application'] = application
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def DestroyRelation(self, endpoints=None):
'''
endpoints : typing.Sequence[str]
Returns -> None
'''
if endpoints is not None and not isinstance(endpoints, (bytes, str, list)):
raise Exception("Expected endpoints to be a Sequence, received: {}".format(type(endpoints)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='DestroyRelation',
version=2,
params=_params)
_params['endpoints'] = endpoints
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def DestroyUnits(self, unit_names=None):
'''
unit_names : typing.Sequence[str]
Returns -> None
'''
if unit_names is not None and not isinstance(unit_names, (bytes, str, list)):
raise Exception("Expected unit_names to be a Sequence, received: {}".format(type(unit_names)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='DestroyUnits',
version=2,
params=_params)
_params['unit-names'] = unit_names
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Expose(self, application=None):
'''
application : str
Returns -> None
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='Expose',
version=2,
params=_params)
_params['application'] = application
reply = await self.rpc(msg)
return reply
@ReturnMapping(ApplicationGetResults)
async def Get(self, application=None):
'''
application : str
Returns -> typing.Union[str, typing.Mapping[str, typing.Any], _ForwardRef('Value')]
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='Get',
version=2,
params=_params)
_params['application'] = application
reply = await self.rpc(msg)
return reply
@ReturnMapping(StringResult)
async def GetCharmURL(self, application=None):
'''
application : str
Returns -> typing.Union[_ForwardRef('Error'), str]
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='GetCharmURL',
version=2,
params=_params)
_params['application'] = application
reply = await self.rpc(msg)
return reply
@ReturnMapping(GetConstraintsResults)
async def GetConstraints(self, application=None):
'''
application : str
Returns -> Value
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='GetConstraints',
version=2,
params=_params)
_params['application'] = application
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Set(self, application=None, options=None):
'''
application : str
options : typing.Mapping[str, str]
Returns -> None
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
if options is not None and not isinstance(options, dict):
raise Exception("Expected options to be a Mapping, received: {}".format(type(options)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='Set',
version=2,
params=_params)
_params['application'] = application
_params['options'] = options
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def SetCharm(self, application=None, channel=None, charm_url=None, config_settings=None, config_settings_yaml=None, force_series=None, force_units=None, resource_ids=None, storage_constraints=None):
'''
application : str
channel : str
charm_url : str
config_settings : typing.Mapping[str, str]
config_settings_yaml : str
force_series : bool
force_units : bool
resource_ids : typing.Mapping[str, str]
storage_constraints : typing.Mapping[str, ~StorageConstraints]
Returns -> None
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
if channel is not None and not isinstance(channel, (bytes, str)):
raise Exception("Expected channel to be a str, received: {}".format(type(channel)))
if charm_url is not None and not isinstance(charm_url, (bytes, str)):
raise Exception("Expected charm_url to be a str, received: {}".format(type(charm_url)))
if config_settings is not None and not isinstance(config_settings, dict):
raise Exception("Expected config_settings to be a Mapping, received: {}".format(type(config_settings)))
if config_settings_yaml is not None and not isinstance(config_settings_yaml, (bytes, str)):
raise Exception("Expected config_settings_yaml to be a str, received: {}".format(type(config_settings_yaml)))
if force_series is not None and not isinstance(force_series, bool):
raise Exception("Expected force_series to be a bool, received: {}".format(type(force_series)))
if force_units is not None and not isinstance(force_units, bool):
raise Exception("Expected force_units to be a bool, received: {}".format(type(force_units)))
if resource_ids is not None and not isinstance(resource_ids, dict):
raise Exception("Expected resource_ids to be a Mapping, received: {}".format(type(resource_ids)))
if storage_constraints is not None and not isinstance(storage_constraints, dict):
raise Exception("Expected storage_constraints to be a Mapping, received: {}".format(type(storage_constraints)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='SetCharm',
version=2,
params=_params)
_params['application'] = application
_params['channel'] = channel
_params['charm-url'] = charm_url
_params['config-settings'] = config_settings
_params['config-settings-yaml'] = config_settings_yaml
_params['force-series'] = force_series
_params['force-units'] = force_units
_params['resource-ids'] = resource_ids
_params['storage-constraints'] = storage_constraints
reply = await self.rpc(msg)
return reply
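    # Illustrative call (editor's addition, values are placeholders): upgrade an
    # application's charm in place without forcing units or series.
    #     await app_facade.SetCharm(application='mysql',
    #                               charm_url='cs:mysql-58',
    #                               channel='stable',
    #                               force_units=False,
    #                               force_series=False)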
@ReturnMapping(None)
async def SetConstraints(self, application=None, constraints=None):
'''
application : str
constraints : Value
Returns -> None
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
if constraints is not None and not isinstance(constraints, (dict, Value)):
raise Exception("Expected constraints to be a Value, received: {}".format(type(constraints)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='SetConstraints',
version=2,
params=_params)
_params['application'] = application
_params['constraints'] = constraints
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def SetMetricCredentials(self, creds=None):
'''
creds : typing.Sequence[~ApplicationMetricCredential]
Returns -> typing.Sequence[~ErrorResult]
'''
if creds is not None and not isinstance(creds, (bytes, str, list)):
raise Exception("Expected creds to be a Sequence, received: {}".format(type(creds)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='SetMetricCredentials',
version=2,
params=_params)
_params['creds'] = creds
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Unexpose(self, application=None):
'''
application : str
Returns -> None
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='Unexpose',
version=2,
params=_params)
_params['application'] = application
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Unset(self, application=None, options=None):
'''
application : str
options : typing.Sequence[str]
Returns -> None
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
if options is not None and not isinstance(options, (bytes, str, list)):
raise Exception("Expected options to be a Sequence, received: {}".format(type(options)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='Unset',
version=2,
params=_params)
_params['application'] = application
_params['options'] = options
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Update(self, application=None, charm_url=None, constraints=None, force_charm_url=None, force_series=None, min_units=None, settings=None, settings_yaml=None):
'''
application : str
charm_url : str
constraints : Value
force_charm_url : bool
force_series : bool
min_units : int
settings : typing.Mapping[str, str]
settings_yaml : str
Returns -> None
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
if charm_url is not None and not isinstance(charm_url, (bytes, str)):
raise Exception("Expected charm_url to be a str, received: {}".format(type(charm_url)))
if constraints is not None and not isinstance(constraints, (dict, Value)):
raise Exception("Expected constraints to be a Value, received: {}".format(type(constraints)))
if force_charm_url is not None and not isinstance(force_charm_url, bool):
raise Exception("Expected force_charm_url to be a bool, received: {}".format(type(force_charm_url)))
if force_series is not None and not isinstance(force_series, bool):
raise Exception("Expected force_series to be a bool, received: {}".format(type(force_series)))
if min_units is not None and not isinstance(min_units, int):
raise Exception("Expected min_units to be a int, received: {}".format(type(min_units)))
if settings is not None and not isinstance(settings, dict):
raise Exception("Expected settings to be a Mapping, received: {}".format(type(settings)))
if settings_yaml is not None and not isinstance(settings_yaml, (bytes, str)):
raise Exception("Expected settings_yaml to be a str, received: {}".format(type(settings_yaml)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Application',
request='Update',
version=2,
params=_params)
_params['application'] = application
_params['charm-url'] = charm_url
_params['constraints'] = constraints
_params['force-charm-url'] = force_charm_url
_params['force-series'] = force_series
_params['min-units'] = min_units
_params['settings'] = settings
_params['settings-yaml'] = settings_yaml
reply = await self.rpc(msg)
return reply
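

# Usage sketch (illustrative only; not emitted by the schema generator):
# update charm settings on a deployed application through the enclosing
# facade. The class name ApplicationFacade is an assumption based on
# libjuju's naming convention (name='Application', version=2), and the
# application name and settings below are hypothetical. `connection` is
# assumed to be an established juju.client.connection.Connection, with
# from_connection() coming from the shared facade Type base class.
async def _example_application_update(connection):
    facade = ApplicationFacade.from_connection(connection)
    # Only the fields being changed need to be passed; unset keyword
    # arguments default to None and are serialized as null.
    return await facade.Update(application='mysql',
                               min_units=2,
                               settings={'flavor': 'percona'})
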
class ApplicationOffersFacade(Type):
name = 'ApplicationOffers'
version = 2
schema = {'definitions': {'AddApplicationOffer': {'additionalProperties': False,
'properties': {'application-description': {'type': 'string'},
'application-name': {'type': 'string'},
'endpoints': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'model-tag': {'type': 'string'},
'offer-name': {'type': 'string'}},
'required': ['model-tag',
'offer-name',
'application-name',
'application-description',
'endpoints'],
'type': 'object'},
'AddApplicationOffers': {'additionalProperties': False,
'properties': {'Offers': {'items': {'$ref': '#/definitions/AddApplicationOffer'},
'type': 'array'}},
'required': ['Offers'],
'type': 'object'},
'ApplicationOfferAdminDetails': {'additionalProperties': False,
'properties': {'ApplicationOfferDetails': {'$ref': '#/definitions/ApplicationOfferDetails'},
'application-description': {'type': 'string'},
'application-name': {'type': 'string'},
'bindings': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'charm-url': {'type': 'string'},
'connections': {'items': {'$ref': '#/definitions/OfferConnection'},
'type': 'array'},
'endpoints': {'items': {'$ref': '#/definitions/RemoteEndpoint'},
'type': 'array'},
'offer-name': {'type': 'string'},
'offer-url': {'type': 'string'},
'offer-uuid': {'type': 'string'},
'source-model-tag': {'type': 'string'},
'spaces': {'items': {'$ref': '#/definitions/RemoteSpace'},
'type': 'array'},
'users': {'items': {'$ref': '#/definitions/OfferUserDetails'},
'type': 'array'}},
'required': ['source-model-tag',
'offer-uuid',
'offer-url',
'offer-name',
'application-description',
'ApplicationOfferDetails',
'application-name',
'charm-url'],
'type': 'object'},
'ApplicationOfferDetails': {'additionalProperties': False,
'properties': {'application-description': {'type': 'string'},
'bindings': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'endpoints': {'items': {'$ref': '#/definitions/RemoteEndpoint'},
'type': 'array'},
'offer-name': {'type': 'string'},
'offer-url': {'type': 'string'},
'offer-uuid': {'type': 'string'},
'source-model-tag': {'type': 'string'},
'spaces': {'items': {'$ref': '#/definitions/RemoteSpace'},
'type': 'array'},
'users': {'items': {'$ref': '#/definitions/OfferUserDetails'},
'type': 'array'}},
'required': ['source-model-tag',
'offer-uuid',
'offer-url',
'offer-name',
'application-description'],
'type': 'object'},
'ApplicationOfferResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'$ref': '#/definitions/ApplicationOfferAdminDetails'}},
'type': 'object'},
'ApplicationOffersResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ApplicationOfferResult'},
'type': 'array'}},
'type': 'object'},
'ConsumeOfferDetails': {'additionalProperties': False,
'properties': {'external-controller': {'$ref': '#/definitions/ExternalControllerInfo'},
'macaroon': {'$ref': '#/definitions/Macaroon'},
'offer': {'$ref': '#/definitions/ApplicationOfferDetails'}},
'type': 'object'},
'ConsumeOfferDetailsResult': {'additionalProperties': False,
'properties': {'ConsumeOfferDetails': {'$ref': '#/definitions/ConsumeOfferDetails'},
'error': {'$ref': '#/definitions/Error'},
'external-controller': {'$ref': '#/definitions/ExternalControllerInfo'},
'macaroon': {'$ref': '#/definitions/Macaroon'},
'offer': {'$ref': '#/definitions/ApplicationOfferDetails'}},
'required': ['ConsumeOfferDetails'],
'type': 'object'},
'ConsumeOfferDetailsResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ConsumeOfferDetailsResult'},
'type': 'array'}},
'type': 'object'},
'DestroyApplicationOffers': {'additionalProperties': False,
'properties': {'force': {'type': 'boolean'},
'offer-urls': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['offer-urls'],
'type': 'object'},
'EndpointFilterAttributes': {'additionalProperties': False,
'properties': {'interface': {'type': 'string'},
'name': {'type': 'string'},
'role': {'type': 'string'}},
'required': ['role',
'interface',
'name'],
'type': 'object'},
'EntityStatus': {'additionalProperties': False,
'properties': {'data': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'info': {'type': 'string'},
'since': {'format': 'date-time',
'type': 'string'},
'status': {'type': 'string'}},
'required': ['status', 'info', 'since'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ExternalControllerInfo': {'additionalProperties': False,
'properties': {'addrs': {'items': {'type': 'string'},
'type': 'array'},
'ca-cert': {'type': 'string'},
'controller-alias': {'type': 'string'},
'controller-tag': {'type': 'string'}},
'required': ['controller-tag',
'controller-alias',
'addrs',
'ca-cert'],
'type': 'object'},
'Macaroon': {'additionalProperties': False, 'type': 'object'},
'ModifyOfferAccess': {'additionalProperties': False,
'properties': {'access': {'type': 'string'},
'action': {'type': 'string'},
'offer-url': {'type': 'string'},
'user-tag': {'type': 'string'}},
'required': ['user-tag',
'action',
'access',
'offer-url'],
'type': 'object'},
'ModifyOfferAccessRequest': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/ModifyOfferAccess'},
'type': 'array'}},
'required': ['changes'],
'type': 'object'},
'OfferConnection': {'additionalProperties': False,
'properties': {'endpoint': {'type': 'string'},
'ingress-subnets': {'items': {'type': 'string'},
'type': 'array'},
'relation-id': {'type': 'integer'},
'source-model-tag': {'type': 'string'},
'status': {'$ref': '#/definitions/EntityStatus'},
'username': {'type': 'string'}},
'required': ['source-model-tag',
'relation-id',
'username',
'endpoint',
'status',
'ingress-subnets'],
'type': 'object'},
'OfferFilter': {'additionalProperties': False,
'properties': {'allowed-users': {'items': {'type': 'string'},
'type': 'array'},
'application-description': {'type': 'string'},
'application-name': {'type': 'string'},
'application-user': {'type': 'string'},
'connected-users': {'items': {'type': 'string'},
'type': 'array'},
'endpoints': {'items': {'$ref': '#/definitions/EndpointFilterAttributes'},
'type': 'array'},
'model-name': {'type': 'string'},
'offer-name': {'type': 'string'},
'owner-name': {'type': 'string'}},
'required': ['owner-name',
'model-name',
'offer-name',
'application-name',
'application-description',
'application-user',
'endpoints',
'connected-users',
'allowed-users'],
'type': 'object'},
'OfferFilters': {'additionalProperties': False,
'properties': {'Filters': {'items': {'$ref': '#/definitions/OfferFilter'},
'type': 'array'}},
'required': ['Filters'],
'type': 'object'},
'OfferURLs': {'additionalProperties': False,
'properties': {'bakery-version': {'type': 'integer'},
'offer-urls': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'},
'OfferUserDetails': {'additionalProperties': False,
'properties': {'access': {'type': 'string'},
'display-name': {'type': 'string'},
'user': {'type': 'string'}},
'required': ['user',
'display-name',
'access'],
'type': 'object'},
'QueryApplicationOffersResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ApplicationOfferAdminDetails'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'RemoteApplicationInfo': {'additionalProperties': False,
'properties': {'description': {'type': 'string'},
'endpoints': {'items': {'$ref': '#/definitions/RemoteEndpoint'},
'type': 'array'},
'icon-url-path': {'type': 'string'},
'model-tag': {'type': 'string'},
'name': {'type': 'string'},
'offer-url': {'type': 'string'},
'source-model-label': {'type': 'string'}},
'required': ['model-tag',
'name',
'description',
'offer-url',
'endpoints',
'icon-url-path'],
'type': 'object'},
'RemoteApplicationInfoResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'$ref': '#/definitions/RemoteApplicationInfo'}},
'type': 'object'},
'RemoteApplicationInfoResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/RemoteApplicationInfoResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'RemoteEndpoint': {'additionalProperties': False,
'properties': {'interface': {'type': 'string'},
'limit': {'type': 'integer'},
'name': {'type': 'string'},
'role': {'type': 'string'}},
'required': ['name',
'role',
'interface',
'limit'],
'type': 'object'},
'RemoteSpace': {'additionalProperties': False,
'properties': {'cloud-type': {'type': 'string'},
'name': {'type': 'string'},
'provider-attributes': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'provider-id': {'type': 'string'},
'subnets': {'items': {'$ref': '#/definitions/Subnet'},
'type': 'array'}},
'required': ['cloud-type',
'name',
'provider-id',
'provider-attributes',
'subnets'],
'type': 'object'},
'Subnet': {'additionalProperties': False,
'properties': {'cidr': {'type': 'string'},
'life': {'type': 'string'},
'provider-id': {'type': 'string'},
'provider-network-id': {'type': 'string'},
'provider-space-id': {'type': 'string'},
'space-tag': {'type': 'string'},
'status': {'type': 'string'},
'vlan-tag': {'type': 'integer'},
'zones': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['cidr',
'vlan-tag',
'life',
'space-tag',
'zones'],
'type': 'object'}},
'properties': {'ApplicationOffers': {'properties': {'Params': {'$ref': '#/definitions/OfferURLs'},
'Result': {'$ref': '#/definitions/ApplicationOffersResults'}},
'type': 'object'},
'DestroyOffers': {'properties': {'Params': {'$ref': '#/definitions/DestroyApplicationOffers'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'FindApplicationOffers': {'properties': {'Params': {'$ref': '#/definitions/OfferFilters'},
'Result': {'$ref': '#/definitions/QueryApplicationOffersResults'}},
'type': 'object'},
'GetConsumeDetails': {'properties': {'Params': {'$ref': '#/definitions/OfferURLs'},
'Result': {'$ref': '#/definitions/ConsumeOfferDetailsResults'}},
'type': 'object'},
'ListApplicationOffers': {'properties': {'Params': {'$ref': '#/definitions/OfferFilters'},
'Result': {'$ref': '#/definitions/QueryApplicationOffersResults'}},
'type': 'object'},
'ModifyOfferAccess': {'properties': {'Params': {'$ref': '#/definitions/ModifyOfferAccessRequest'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'Offer': {'properties': {'Params': {'$ref': '#/definitions/AddApplicationOffers'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'RemoteApplicationInfo': {'properties': {'Params': {'$ref': '#/definitions/OfferURLs'},
'Result': {'$ref': '#/definitions/RemoteApplicationInfoResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ApplicationOffersResults)
async def ApplicationOffers(self, bakery_version=None, offer_urls=None):
'''
bakery_version : int
offer_urls : typing.Sequence[str]
Returns -> typing.Sequence[~ApplicationOfferResult]
'''
if bakery_version is not None and not isinstance(bakery_version, int):
raise Exception("Expected bakery_version to be a int, received: {}".format(type(bakery_version)))
if offer_urls is not None and not isinstance(offer_urls, (bytes, str, list)):
raise Exception("Expected offer_urls to be a Sequence, received: {}".format(type(offer_urls)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ApplicationOffers',
request='ApplicationOffers',
version=2,
params=_params)
_params['bakery-version'] = bakery_version
_params['offer-urls'] = offer_urls
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def DestroyOffers(self, force=None, offer_urls=None):
'''
force : bool
offer_urls : typing.Sequence[str]
Returns -> typing.Sequence[~ErrorResult]
'''
if force is not None and not isinstance(force, bool):
raise Exception("Expected force to be a bool, received: {}".format(type(force)))
if offer_urls is not None and not isinstance(offer_urls, (bytes, str, list)):
raise Exception("Expected offer_urls to be a Sequence, received: {}".format(type(offer_urls)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ApplicationOffers',
request='DestroyOffers',
version=2,
params=_params)
_params['force'] = force
_params['offer-urls'] = offer_urls
reply = await self.rpc(msg)
return reply
@ReturnMapping(QueryApplicationOffersResults)
async def FindApplicationOffers(self, filters=None):
'''
filters : typing.Sequence[~OfferFilter]
Returns -> typing.Sequence[~ApplicationOfferAdminDetails]
'''
if filters is not None and not isinstance(filters, (bytes, str, list)):
raise Exception("Expected filters to be a Sequence, received: {}".format(type(filters)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ApplicationOffers',
request='FindApplicationOffers',
version=2,
params=_params)
_params['Filters'] = filters
reply = await self.rpc(msg)
return reply
@ReturnMapping(ConsumeOfferDetailsResults)
async def GetConsumeDetails(self, bakery_version=None, offer_urls=None):
'''
bakery_version : int
offer_urls : typing.Sequence[str]
Returns -> typing.Sequence[~ConsumeOfferDetailsResult]
'''
if bakery_version is not None and not isinstance(bakery_version, int):
raise Exception("Expected bakery_version to be a int, received: {}".format(type(bakery_version)))
if offer_urls is not None and not isinstance(offer_urls, (bytes, str, list)):
raise Exception("Expected offer_urls to be a Sequence, received: {}".format(type(offer_urls)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ApplicationOffers',
request='GetConsumeDetails',
version=2,
params=_params)
_params['bakery-version'] = bakery_version
_params['offer-urls'] = offer_urls
reply = await self.rpc(msg)
return reply
@ReturnMapping(QueryApplicationOffersResults)
async def ListApplicationOffers(self, filters=None):
'''
filters : typing.Sequence[~OfferFilter]
Returns -> typing.Sequence[~ApplicationOfferAdminDetails]
'''
if filters is not None and not isinstance(filters, (bytes, str, list)):
raise Exception("Expected filters to be a Sequence, received: {}".format(type(filters)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ApplicationOffers',
request='ListApplicationOffers',
version=2,
params=_params)
_params['Filters'] = filters
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def ModifyOfferAccess(self, changes=None):
'''
changes : typing.Sequence[~ModifyOfferAccess]
Returns -> typing.Sequence[~ErrorResult]
'''
if changes is not None and not isinstance(changes, (bytes, str, list)):
raise Exception("Expected changes to be a Sequence, received: {}".format(type(changes)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ApplicationOffers',
request='ModifyOfferAccess',
version=2,
params=_params)
_params['changes'] = changes
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def Offer(self, offers=None):
'''
offers : typing.Sequence[~AddApplicationOffer]
Returns -> typing.Sequence[~ErrorResult]
'''
if offers is not None and not isinstance(offers, (bytes, str, list)):
raise Exception("Expected offers to be a Sequence, received: {}".format(type(offers)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ApplicationOffers',
request='Offer',
version=2,
params=_params)
_params['Offers'] = offers
reply = await self.rpc(msg)
return reply
@ReturnMapping(RemoteApplicationInfoResults)
async def RemoteApplicationInfo(self, bakery_version=None, offer_urls=None):
'''
bakery_version : int
offer_urls : typing.Sequence[str]
Returns -> typing.Sequence[~RemoteApplicationInfoResult]
'''
if bakery_version is not None and not isinstance(bakery_version, int):
raise Exception("Expected bakery_version to be a int, received: {}".format(type(bakery_version)))
if offer_urls is not None and not isinstance(offer_urls, (bytes, str, list)):
raise Exception("Expected offer_urls to be a Sequence, received: {}".format(type(offer_urls)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ApplicationOffers',
request='RemoteApplicationInfo',
version=2,
params=_params)
_params['bakery-version'] = bakery_version
_params['offer-urls'] = offer_urls
reply = await self.rpc(msg)
return reply
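

# Usage sketch (illustrative only; not emitted by the schema generator):
# query cross-model application offers with FindApplicationOffers. The
# filter below is a plain mapping shaped like the OfferFilter definition
# above (a typed wrapper from the definitions module could be used
# instead), and the owner/model/offer names are hypothetical.
# `connection` is assumed to be an established
# juju.client.connection.Connection.
async def _example_find_offers(connection):
    facade = ApplicationOffersFacade.from_connection(connection)
    return await facade.FindApplicationOffers(
        filters=[{'owner-name': 'admin',
                  'model-name': 'default',
                  'offer-name': 'mysql'}])
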
class BackupsFacade(Type):
name = 'Backups'
version = 2
schema = {'definitions': {'BackupsCreateArgs': {'additionalProperties': False,
'properties': {'keep-copy': {'type': 'boolean'},
'no-download': {'type': 'boolean'},
'notes': {'type': 'string'}},
'required': ['notes',
'keep-copy',
'no-download'],
'type': 'object'},
'BackupsInfoArgs': {'additionalProperties': False,
'properties': {'id': {'type': 'string'}},
'required': ['id'],
'type': 'object'},
'BackupsListArgs': {'additionalProperties': False,
'type': 'object'},
'BackupsListResult': {'additionalProperties': False,
'properties': {'list': {'items': {'$ref': '#/definitions/BackupsMetadataResult'},
'type': 'array'}},
'required': ['list'],
'type': 'object'},
'BackupsMetadataResult': {'additionalProperties': False,
'properties': {'ca-cert': {'type': 'string'},
'ca-private-key': {'type': 'string'},
'checksum': {'type': 'string'},
'checksum-format': {'type': 'string'},
'controller-machine-id': {'type': 'string'},
'controller-machine-inst-id': {'type': 'string'},
'controller-uuid': {'type': 'string'},
'filename': {'type': 'string'},
'finished': {'format': 'date-time',
'type': 'string'},
'format-version': {'type': 'integer'},
'ha-nodes': {'type': 'integer'},
'hostname': {'type': 'string'},
'id': {'type': 'string'},
'machine': {'type': 'string'},
'model': {'type': 'string'},
'notes': {'type': 'string'},
'series': {'type': 'string'},
'size': {'type': 'integer'},
'started': {'format': 'date-time',
'type': 'string'},
'stored': {'format': 'date-time',
'type': 'string'},
'version': {'$ref': '#/definitions/Number'}},
'required': ['id',
'checksum',
'checksum-format',
'size',
'stored',
'started',
'finished',
'notes',
'model',
'machine',
'hostname',
'version',
'series',
'ca-cert',
'ca-private-key',
'filename',
'format-version',
'controller-uuid',
'controller-machine-id',
'controller-machine-inst-id',
'ha-nodes'],
'type': 'object'},
'BackupsRemoveArgs': {'additionalProperties': False,
'properties': {'ids': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['ids'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'Number': {'additionalProperties': False,
'properties': {'Build': {'type': 'integer'},
'Major': {'type': 'integer'},
'Minor': {'type': 'integer'},
'Patch': {'type': 'integer'},
'Tag': {'type': 'string'}},
'required': ['Major',
'Minor',
'Tag',
'Patch',
'Build'],
'type': 'object'},
'RestoreArgs': {'additionalProperties': False,
'properties': {'backup-id': {'type': 'string'}},
'required': ['backup-id'],
'type': 'object'}},
'properties': {'Create': {'properties': {'Params': {'$ref': '#/definitions/BackupsCreateArgs'},
'Result': {'$ref': '#/definitions/BackupsMetadataResult'}},
'type': 'object'},
'FinishRestore': {'type': 'object'},
'Info': {'properties': {'Params': {'$ref': '#/definitions/BackupsInfoArgs'},
'Result': {'$ref': '#/definitions/BackupsMetadataResult'}},
'type': 'object'},
'List': {'properties': {'Params': {'$ref': '#/definitions/BackupsListArgs'},
'Result': {'$ref': '#/definitions/BackupsListResult'}},
'type': 'object'},
'PrepareRestore': {'type': 'object'},
'Remove': {'properties': {'Params': {'$ref': '#/definitions/BackupsRemoveArgs'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'Restore': {'properties': {'Params': {'$ref': '#/definitions/RestoreArgs'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(BackupsMetadataResult)
async def Create(self, keep_copy=None, no_download=None, notes=None):
'''
keep_copy : bool
no_download : bool
notes : str
Returns -> typing.Union[str, int, _ForwardRef('Number')]
'''
if keep_copy is not None and not isinstance(keep_copy, bool):
raise Exception("Expected keep_copy to be a bool, received: {}".format(type(keep_copy)))
if no_download is not None and not isinstance(no_download, bool):
raise Exception("Expected no_download to be a bool, received: {}".format(type(no_download)))
if notes is not None and not isinstance(notes, (bytes, str)):
raise Exception("Expected notes to be a str, received: {}".format(type(notes)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Backups',
request='Create',
version=2,
params=_params)
_params['keep-copy'] = keep_copy
_params['no-download'] = no_download
_params['notes'] = notes
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def FinishRestore(self):
'''
Returns -> None
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Backups',
request='FinishRestore',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(BackupsMetadataResult)
async def Info(self, id_=None):
'''
id_ : str
Returns -> typing.Union[str, int, _ForwardRef('Number')]
'''
if id_ is not None and not isinstance(id_, (bytes, str)):
raise Exception("Expected id_ to be a str, received: {}".format(type(id_)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Backups',
request='Info',
version=2,
params=_params)
_params['id'] = id_
reply = await self.rpc(msg)
return reply
@ReturnMapping(BackupsListResult)
async def List(self):
'''
Returns -> typing.Sequence[~BackupsMetadataResult]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Backups',
request='List',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def PrepareRestore(self):
'''
Returns -> None
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Backups',
request='PrepareRestore',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def Remove(self, ids=None):
'''
ids : typing.Sequence[str]
Returns -> typing.Sequence[~ErrorResult]
'''
if ids is not None and not isinstance(ids, (bytes, str, list)):
raise Exception("Expected ids to be a Sequence, received: {}".format(type(ids)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Backups',
request='Remove',
version=2,
params=_params)
_params['ids'] = ids
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Restore(self, backup_id=None):
'''
backup_id : str
Returns -> None
'''
if backup_id is not None and not isinstance(backup_id, (bytes, str)):
raise Exception("Expected backup_id to be a str, received: {}".format(type(backup_id)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Backups',
request='Restore',
version=2,
params=_params)
_params['backup-id'] = backup_id
reply = await self.rpc(msg)
return reply
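

# Usage sketch (illustrative only; not emitted by the schema generator):
# request a controller backup via Create. no_download=True asks the
# controller to keep the archive server-side instead of streaming it
# back; the notes string is hypothetical. `connection` is assumed to be
# an established juju.client.connection.Connection.
async def _example_create_backup(connection):
    facade = BackupsFacade.from_connection(connection)
    return await facade.Create(keep_copy=True,
                               no_download=True,
                               notes='pre-upgrade snapshot')
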
class BlockFacade(Type):
name = 'Block'
version = 2
schema = {'definitions': {'Block': {'additionalProperties': False,
'properties': {'id': {'type': 'string'},
'message': {'type': 'string'},
'tag': {'type': 'string'},
'type': {'type': 'string'}},
'required': ['id', 'tag', 'type'],
'type': 'object'},
'BlockResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'$ref': '#/definitions/Block'}},
'required': ['result'],
'type': 'object'},
'BlockResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/BlockResult'},
'type': 'array'}},
'type': 'object'},
'BlockSwitchParams': {'additionalProperties': False,
'properties': {'message': {'type': 'string'},
'type': {'type': 'string'}},
'required': ['type'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'}},
'properties': {'List': {'properties': {'Result': {'$ref': '#/definitions/BlockResults'}},
'type': 'object'},
'SwitchBlockOff': {'properties': {'Params': {'$ref': '#/definitions/BlockSwitchParams'},
'Result': {'$ref': '#/definitions/ErrorResult'}},
'type': 'object'},
'SwitchBlockOn': {'properties': {'Params': {'$ref': '#/definitions/BlockSwitchParams'},
'Result': {'$ref': '#/definitions/ErrorResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(BlockResults)
async def List(self):
'''
Returns -> typing.Sequence[~BlockResult]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Block',
request='List',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResult)
async def SwitchBlockOff(self, message=None, type_=None):
'''
message : str
type_ : str
Returns -> Error
'''
if message is not None and not isinstance(message, (bytes, str)):
raise Exception("Expected message to be a str, received: {}".format(type(message)))
if type_ is not None and not isinstance(type_, (bytes, str)):
raise Exception("Expected type_ to be a str, received: {}".format(type(type_)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Block',
request='SwitchBlockOff',
version=2,
params=_params)
_params['message'] = message
_params['type'] = type_
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResult)
async def SwitchBlockOn(self, message=None, type_=None):
'''
message : str
type_ : str
Returns -> Error
'''
if message is not None and not isinstance(message, (bytes, str)):
raise Exception("Expected message to be a str, received: {}".format(type(message)))
if type_ is not None and not isinstance(type_, (bytes, str)):
raise Exception("Expected type_ to be a str, received: {}".format(type(type_)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Block',
request='SwitchBlockOn',
version=2,
params=_params)
_params['message'] = message
_params['type'] = type_
reply = await self.rpc(msg)
return reply
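

# Usage sketch (illustrative only; not emitted by the schema generator):
# enable a model block. 'BlockChange' is one of the block types Juju
# understands (alongside 'BlockDestroy' and 'BlockRemove'); the message
# is free text shown to operators who run a blocked command.
# `connection` is assumed to be an established
# juju.client.connection.Connection.
async def _example_switch_block_on(connection):
    facade = BlockFacade.from_connection(connection)
    return await facade.SwitchBlockOn(message='maintenance window',
                                      type_='BlockChange')
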
class BundleFacade(Type):
name = 'Bundle'
version = 2
schema = {'definitions': {'BundleChange': {'additionalProperties': False,
'properties': {'args': {'items': {'additionalProperties': True,
'type': 'object'},
'type': 'array'},
'id': {'type': 'string'},
'method': {'type': 'string'},
'requires': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['id',
'method',
'args',
'requires'],
'type': 'object'},
'BundleChangesParams': {'additionalProperties': False,
'properties': {'bundleURL': {'type': 'string'},
'yaml': {'type': 'string'}},
'required': ['yaml', 'bundleURL'],
'type': 'object'},
'BundleChangesResults': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/BundleChange'},
'type': 'array'},
'errors': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'StringResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'type': 'string'}},
'required': ['result'],
'type': 'object'}},
'properties': {'ExportBundle': {'properties': {'Result': {'$ref': '#/definitions/StringResult'}},
'type': 'object'},
'GetChanges': {'properties': {'Params': {'$ref': '#/definitions/BundleChangesParams'},
'Result': {'$ref': '#/definitions/BundleChangesResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(StringResult)
async def ExportBundle(self):
'''
Returns -> typing.Union[_ForwardRef('Error'), str]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Bundle',
request='ExportBundle',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(BundleChangesResults)
async def GetChanges(self, bundleurl=None, yaml=None):
'''
bundleurl : str
yaml : str
Returns -> typing.Union[typing.Sequence[~BundleChange], typing.Sequence[str]]
'''
if bundleurl is not None and not isinstance(bundleurl, (bytes, str)):
raise Exception("Expected bundleurl to be a str, received: {}".format(type(bundleurl)))
if yaml is not None and not isinstance(yaml, (bytes, str)):
raise Exception("Expected yaml to be a str, received: {}".format(type(yaml)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Bundle',
request='GetChanges',
version=2,
params=_params)
_params['bundleURL'] = bundleurl
_params['yaml'] = yaml
reply = await self.rpc(msg)
return reply
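

# Usage sketch (illustrative only; not emitted by the schema generator):
# ask the controller to compute deployment changes for raw bundle YAML.
# Passing an empty bundleURL for ad-hoc YAML is an assumption; clients
# that deployed from a store would pass the bundle's URL instead.
# `connection` is assumed to be an established
# juju.client.connection.Connection.
async def _example_bundle_changes(connection, bundle_yaml):
    facade = BundleFacade.from_connection(connection)
    result = await facade.GetChanges(bundleurl='', yaml=bundle_yaml)
    # Per the schema above, result carries 'changes' (an ordered list of
    # BundleChange records) and 'errors' (verification failures, if any).
    return result
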
class CharmRevisionUpdaterFacade(Type):
name = 'CharmRevisionUpdater'
version = 2
schema = {'definitions': {'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'}},
'properties': {'UpdateLatestRevisions': {'properties': {'Result': {'$ref': '#/definitions/ErrorResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResult)
async def UpdateLatestRevisions(self):
'''
Returns -> Error
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='CharmRevisionUpdater',
request='UpdateLatestRevisions',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
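

# Usage sketch (illustrative only; not emitted by the schema generator):
# trigger a charm revision refresh. This facade is normally used by the
# controller's own revision-update worker rather than end-user clients,
# so an ordinary user connection may not be authorized to call it.
# `connection` is assumed to be an established
# juju.client.connection.Connection.
async def _example_update_latest_revisions(connection):
    facade = CharmRevisionUpdaterFacade.from_connection(connection)
    # Takes no parameters; the decoded ErrorResult has a None error on
    # success.
    return await facade.UpdateLatestRevisions()
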
class CharmsFacade(Type):
name = 'Charms'
version = 2
schema = {'definitions': {'Charm': {'additionalProperties': False,
'properties': {'actions': {'$ref': '#/definitions/CharmActions'},
'config': {'patternProperties': {'.*': {'$ref': '#/definitions/CharmOption'}},
'type': 'object'},
'lxd-profile': {'$ref': '#/definitions/CharmLXDProfile'},
'meta': {'$ref': '#/definitions/CharmMeta'},
'metrics': {'$ref': '#/definitions/CharmMetrics'},
'revision': {'type': 'integer'},
'url': {'type': 'string'}},
'required': ['revision', 'url', 'config'],
'type': 'object'},
'CharmActionSpec': {'additionalProperties': False,
'properties': {'description': {'type': 'string'},
'params': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'}},
'required': ['description', 'params'],
'type': 'object'},
'CharmActions': {'additionalProperties': False,
'properties': {'specs': {'patternProperties': {'.*': {'$ref': '#/definitions/CharmActionSpec'}},
'type': 'object'}},
'type': 'object'},
'CharmDevice': {'additionalProperties': False,
'properties': {'CountMax': {'type': 'integer'},
'CountMin': {'type': 'integer'},
'Description': {'type': 'string'},
'Name': {'type': 'string'},
'Type': {'type': 'string'}},
'required': ['Name',
'Description',
'Type',
'CountMin',
'CountMax'],
'type': 'object'},
'CharmLXDProfile': {'additionalProperties': False,
'properties': {'config': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'description': {'type': 'string'},
'devices': {'patternProperties': {'.*': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'}},
'type': 'object'}},
'required': ['config',
'description',
'devices'],
'type': 'object'},
'CharmMeta': {'additionalProperties': False,
'properties': {'categories': {'items': {'type': 'string'},
'type': 'array'},
'description': {'type': 'string'},
'devices': {'patternProperties': {'.*': {'$ref': '#/definitions/CharmDevice'}},
'type': 'object'},
'extra-bindings': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'min-juju-version': {'type': 'string'},
'name': {'type': 'string'},
'payload-classes': {'patternProperties': {'.*': {'$ref': '#/definitions/CharmPayloadClass'}},
'type': 'object'},
'peers': {'patternProperties': {'.*': {'$ref': '#/definitions/CharmRelation'}},
'type': 'object'},
'provides': {'patternProperties': {'.*': {'$ref': '#/definitions/CharmRelation'}},
'type': 'object'},
'requires': {'patternProperties': {'.*': {'$ref': '#/definitions/CharmRelation'}},
'type': 'object'},
'resources': {'patternProperties': {'.*': {'$ref': '#/definitions/CharmResourceMeta'}},
'type': 'object'},
'series': {'items': {'type': 'string'},
'type': 'array'},
'storage': {'patternProperties': {'.*': {'$ref': '#/definitions/CharmStorage'}},
'type': 'object'},
'subordinate': {'type': 'boolean'},
'summary': {'type': 'string'},
'tags': {'items': {'type': 'string'},
'type': 'array'},
'terms': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['name',
'summary',
'description',
'subordinate'],
'type': 'object'},
'CharmMetric': {'additionalProperties': False,
'properties': {'description': {'type': 'string'},
'type': {'type': 'string'}},
'required': ['type', 'description'],
'type': 'object'},
'CharmMetrics': {'additionalProperties': False,
'properties': {'metrics': {'patternProperties': {'.*': {'$ref': '#/definitions/CharmMetric'}},
'type': 'object'},
'plan': {'$ref': '#/definitions/CharmPlan'}},
'required': ['metrics', 'plan'],
'type': 'object'},
'CharmOption': {'additionalProperties': False,
'properties': {'default': {'additionalProperties': True,
'type': 'object'},
'description': {'type': 'string'},
'type': {'type': 'string'}},
'required': ['type'],
'type': 'object'},
'CharmPayloadClass': {'additionalProperties': False,
'properties': {'name': {'type': 'string'},
'type': {'type': 'string'}},
'required': ['name', 'type'],
'type': 'object'},
'CharmPlan': {'additionalProperties': False,
'properties': {'required': {'type': 'boolean'}},
'required': ['required'],
'type': 'object'},
'CharmRelation': {'additionalProperties': False,
'properties': {'interface': {'type': 'string'},
'limit': {'type': 'integer'},
'name': {'type': 'string'},
'optional': {'type': 'boolean'},
'role': {'type': 'string'},
'scope': {'type': 'string'}},
'required': ['name',
'role',
'interface',
'optional',
'limit',
'scope'],
'type': 'object'},
'CharmResourceMeta': {'additionalProperties': False,
'properties': {'description': {'type': 'string'},
'name': {'type': 'string'},
'path': {'type': 'string'},
'type': {'type': 'string'}},
'required': ['name',
'type',
'path',
'description'],
'type': 'object'},
'CharmStorage': {'additionalProperties': False,
'properties': {'count-max': {'type': 'integer'},
'count-min': {'type': 'integer'},
'description': {'type': 'string'},
'location': {'type': 'string'},
'minimum-size': {'type': 'integer'},
'name': {'type': 'string'},
'properties': {'items': {'type': 'string'},
'type': 'array'},
'read-only': {'type': 'boolean'},
'shared': {'type': 'boolean'},
'type': {'type': 'string'}},
'required': ['name',
'description',
'type',
'shared',
'read-only',
'count-min',
'count-max',
'minimum-size'],
'type': 'object'},
'CharmURL': {'additionalProperties': False,
'properties': {'url': {'type': 'string'}},
'required': ['url'],
'type': 'object'},
'CharmsList': {'additionalProperties': False,
'properties': {'names': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['names'],
'type': 'object'},
'CharmsListResult': {'additionalProperties': False,
'properties': {'charm-urls': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['charm-urls'],
'type': 'object'},
'IsMeteredResult': {'additionalProperties': False,
'properties': {'metered': {'type': 'boolean'}},
'required': ['metered'],
'type': 'object'}},
'properties': {'CharmInfo': {'properties': {'Params': {'$ref': '#/definitions/CharmURL'},
'Result': {'$ref': '#/definitions/Charm'}},
'type': 'object'},
'IsMetered': {'properties': {'Params': {'$ref': '#/definitions/CharmURL'},
'Result': {'$ref': '#/definitions/IsMeteredResult'}},
'type': 'object'},
'List': {'properties': {'Params': {'$ref': '#/definitions/CharmsList'},
'Result': {'$ref': '#/definitions/CharmsListResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(Charm)
async def CharmInfo(self, url=None):
'''
url : str
Returns -> typing.Union[_ForwardRef('CharmActions'), typing.Mapping[str, ~CharmOption], _ForwardRef('CharmLXDProfile'), _ForwardRef('CharmMeta'), _ForwardRef('CharmMetrics'), int, str]
'''
if url is not None and not isinstance(url, (bytes, str)):
raise Exception("Expected url to be a str, received: {}".format(type(url)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Charms',
request='CharmInfo',
version=2,
params=_params)
_params['url'] = url
reply = await self.rpc(msg)
return reply
@ReturnMapping(IsMeteredResult)
async def IsMetered(self, url=None):
'''
url : str
Returns -> bool
'''
if url is not None and not isinstance(url, (bytes, str)):
raise Exception("Expected url to be a str, received: {}".format(type(url)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Charms',
request='IsMetered',
version=2,
params=_params)
_params['url'] = url
reply = await self.rpc(msg)
return reply
@ReturnMapping(CharmsListResult)
async def List(self, names=None):
'''
names : typing.Sequence[str]
Returns -> typing.Sequence[str]
'''
if names is not None and not isinstance(names, (bytes, str, list)):
raise Exception("Expected names to be a Sequence, received: {}".format(type(names)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Charms',
request='List',
version=2,
params=_params)
_params['names'] = names
reply = await self.rpc(msg)
return reply
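

# Usage sketch (illustrative only; not emitted by the schema generator):
# inspect a charm known to the model. The charm URL below is
# hypothetical. `connection` is assumed to be an established
# juju.client.connection.Connection.
async def _example_charm_info(connection):
    facade = CharmsFacade.from_connection(connection)
    # Returns a Charm record carrying the revision, config options and
    # metadata described by the schema above.
    return await facade.CharmInfo(url='cs:mysql-58')
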
class CleanerFacade(Type):
name = 'Cleaner'
version = 2
schema = {'definitions': {'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'NotifyWatchResult': {'additionalProperties': False,
'properties': {'NotifyWatcherId': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'required': ['NotifyWatcherId'],
'type': 'object'}},
'properties': {'Cleanup': {'type': 'object'},
'WatchCleanups': {'properties': {'Result': {'$ref': '#/definitions/NotifyWatchResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(None)
async def Cleanup(self):
'''
Returns -> None
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Cleaner',
request='Cleanup',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResult)
async def WatchCleanups(self):
'''
Returns -> typing.Union[str, _ForwardRef('Error')]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Cleaner',
request='WatchCleanups',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
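

# Usage sketch (illustrative only; not emitted by the schema generator):
# watch for pending model cleanups. Like the revision updater, this is
# an internal facade used by controller workers, so user connections
# may be refused. The returned NotifyWatchResult carries a watcher id
# that a NotifyWatcher facade can then poll for cleanup events.
# `connection` is assumed to be an established
# juju.client.connection.Connection.
async def _example_watch_cleanups(connection):
    facade = CleanerFacade.from_connection(connection)
    return await facade.WatchCleanups()
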
class ClientFacade(Type):
name = 'Client'
version = 2
schema = {'definitions': {'APIHostPortsResult': {'additionalProperties': False,
'properties': {'servers': {'items': {'items': {'$ref': '#/definitions/HostPort'},
'type': 'array'},
'type': 'array'}},
'required': ['servers'],
'type': 'object'},
'AddCharm': {'additionalProperties': False,
'properties': {'channel': {'type': 'string'},
'force': {'type': 'boolean'},
'url': {'type': 'string'}},
'required': ['url', 'channel', 'force'],
'type': 'object'},
'AddCharmWithAuthorization': {'additionalProperties': False,
'properties': {'channel': {'type': 'string'},
'force': {'type': 'boolean'},
'macaroon': {'$ref': '#/definitions/Macaroon'},
'url': {'type': 'string'}},
'required': ['url',
'channel',
'macaroon',
'force'],
'type': 'object'},
'AddMachineParams': {'additionalProperties': False,
'properties': {'addresses': {'items': {'$ref': '#/definitions/Address'},
'type': 'array'},
'constraints': {'$ref': '#/definitions/Value'},
'container-type': {'type': 'string'},
'disks': {'items': {'$ref': '#/definitions/Constraints'},
'type': 'array'},
'hardware-characteristics': {'$ref': '#/definitions/HardwareCharacteristics'},
'instance-id': {'type': 'string'},
'jobs': {'items': {'type': 'string'},
'type': 'array'},
'nonce': {'type': 'string'},
'parent-id': {'type': 'string'},
'placement': {'$ref': '#/definitions/Placement'},
'series': {'type': 'string'}},
'required': ['series',
'constraints',
'jobs',
'parent-id',
'container-type',
'instance-id',
'nonce',
'hardware-characteristics',
'addresses'],
'type': 'object'},
'AddMachines': {'additionalProperties': False,
'properties': {'params': {'items': {'$ref': '#/definitions/AddMachineParams'},
'type': 'array'}},
'required': ['params'],
'type': 'object'},
'AddMachinesResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'machine': {'type': 'string'}},
'required': ['machine'],
'type': 'object'},
'AddMachinesResults': {'additionalProperties': False,
'properties': {'machines': {'items': {'$ref': '#/definitions/AddMachinesResult'},
'type': 'array'}},
'required': ['machines'],
'type': 'object'},
'Address': {'additionalProperties': False,
'properties': {'scope': {'type': 'string'},
'space-id': {'type': 'string'},
'space-name': {'type': 'string'},
'type': {'type': 'string'},
'value': {'type': 'string'}},
'required': ['value', 'type', 'scope'],
'type': 'object'},
'AgentVersionResult': {'additionalProperties': False,
'properties': {'version': {'$ref': '#/definitions/Number'}},
'required': ['version'],
'type': 'object'},
'AllWatcherId': {'additionalProperties': False,
'properties': {'watcher-id': {'type': 'string'}},
'required': ['watcher-id'],
'type': 'object'},
'ApplicationOfferStatus': {'additionalProperties': False,
'properties': {'active-connected-count': {'type': 'integer'},
'application-name': {'type': 'string'},
'charm': {'type': 'string'},
'endpoints': {'patternProperties': {'.*': {'$ref': '#/definitions/RemoteEndpoint'}},
'type': 'object'},
'err': {'$ref': '#/definitions/Error'},
'offer-name': {'type': 'string'},
'total-connected-count': {'type': 'integer'}},
'required': ['offer-name',
'application-name',
'charm',
'endpoints',
'active-connected-count',
'total-connected-count'],
'type': 'object'},
'ApplicationStatus': {'additionalProperties': False,
'properties': {'can-upgrade-to': {'type': 'string'},
'charm': {'type': 'string'},
'charm-profile': {'type': 'string'},
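                                                     # NOTE: 'charm-verion' (sic) reproduces a typo in the
                                                     # upstream Juju API schema; renaming it here would break
                                                     # wire compatibility with the controller.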
'charm-verion': {'type': 'string'},
'endpoint-bindings': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'err': {'$ref': '#/definitions/Error'},
'exposed': {'type': 'boolean'},
'int': {'type': 'integer'},
'life': {'type': 'string'},
'meter-statuses': {'patternProperties': {'.*': {'$ref': '#/definitions/MeterStatus'}},
'type': 'object'},
'provider-id': {'type': 'string'},
'public-address': {'type': 'string'},
'relations': {'patternProperties': {'.*': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'},
'series': {'type': 'string'},
'status': {'$ref': '#/definitions/DetailedStatus'},
'subordinate-to': {'items': {'type': 'string'},
'type': 'array'},
'units': {'patternProperties': {'.*': {'$ref': '#/definitions/UnitStatus'}},
'type': 'object'},
'workload-version': {'type': 'string'}},
'required': ['charm',
'series',
'exposed',
'life',
'relations',
'can-upgrade-to',
'subordinate-to',
'units',
'meter-statuses',
'status',
'workload-version',
'charm-verion',
'charm-profile',
'endpoint-bindings',
'public-address'],
'type': 'object'},
'Binary': {'additionalProperties': False,
'properties': {'Arch': {'type': 'string'},
'Build': {'type': 'integer'},
'Major': {'type': 'integer'},
'Minor': {'type': 'integer'},
'Number': {'$ref': '#/definitions/Number'},
'Patch': {'type': 'integer'},
'Series': {'type': 'string'},
'Tag': {'type': 'string'}},
'required': ['Major',
'Minor',
'Tag',
'Patch',
'Build',
'Number',
'Series',
'Arch'],
'type': 'object'},
'BranchStatus': {'additionalProperties': False,
'properties': {'assigned-units': {'patternProperties': {'.*': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'},
'created': {'type': 'integer'},
'created-by': {'type': 'string'}},
'required': ['assigned-units',
'created',
'created-by'],
'type': 'object'},
'BundleChange': {'additionalProperties': False,
'properties': {'args': {'items': {'additionalProperties': True,
'type': 'object'},
'type': 'array'},
'id': {'type': 'string'},
'method': {'type': 'string'},
'requires': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['id',
'method',
'args',
'requires'],
'type': 'object'},
'BundleChangesParams': {'additionalProperties': False,
'properties': {'bundleURL': {'type': 'string'},
'yaml': {'type': 'string'}},
'required': ['yaml', 'bundleURL'],
'type': 'object'},
'BundleChangesResults': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/BundleChange'},
'type': 'array'},
'errors': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'},
'BytesResult': {'additionalProperties': False,
'properties': {'result': {'items': {'type': 'integer'},
'type': 'array'}},
'required': ['result'],
'type': 'object'},
'ConfigValue': {'additionalProperties': False,
'properties': {'source': {'type': 'string'},
'value': {'additionalProperties': True,
'type': 'object'}},
'required': ['value', 'source'],
'type': 'object'},
'Constraints': {'additionalProperties': False,
'properties': {'Count': {'type': 'integer'},
'Pool': {'type': 'string'},
'Size': {'type': 'integer'}},
'required': ['Pool', 'Size', 'Count'],
'type': 'object'},
'DestroyMachines': {'additionalProperties': False,
'properties': {'force': {'type': 'boolean'},
'machine-names': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['machine-names', 'force'],
'type': 'object'},
'DetailedStatus': {'additionalProperties': False,
'properties': {'data': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'err': {'$ref': '#/definitions/Error'},
'info': {'type': 'string'},
'kind': {'type': 'string'},
'life': {'type': 'string'},
'since': {'format': 'date-time',
'type': 'string'},
'status': {'type': 'string'},
'version': {'type': 'string'}},
'required': ['status',
'info',
'data',
'since',
'kind',
'version',
'life'],
'type': 'object'},
'EndpointStatus': {'additionalProperties': False,
'properties': {'application': {'type': 'string'},
'name': {'type': 'string'},
'role': {'type': 'string'},
'subordinate': {'type': 'boolean'}},
'required': ['application',
'name',
'role',
'subordinate'],
'type': 'object'},
'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'EntityStatus': {'additionalProperties': False,
'properties': {'data': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'info': {'type': 'string'},
'since': {'format': 'date-time',
'type': 'string'},
'status': {'type': 'string'}},
'required': ['status', 'info', 'since'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'FindToolsParams': {'additionalProperties': False,
'properties': {'agentstream': {'type': 'string'},
'arch': {'type': 'string'},
'major': {'type': 'integer'},
'minor': {'type': 'integer'},
'number': {'$ref': '#/definitions/Number'},
'series': {'type': 'string'}},
'required': ['number',
'major',
'minor',
'arch',
'series',
'agentstream'],
'type': 'object'},
'FindToolsResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'list': {'items': {'$ref': '#/definitions/Tools'},
'type': 'array'}},
'required': ['list'],
'type': 'object'},
'FullStatus': {'additionalProperties': False,
'properties': {'applications': {'patternProperties': {'.*': {'$ref': '#/definitions/ApplicationStatus'}},
'type': 'object'},
'branches': {'patternProperties': {'.*': {'$ref': '#/definitions/BranchStatus'}},
'type': 'object'},
'controller-timestamp': {'format': 'date-time',
'type': 'string'},
'machines': {'patternProperties': {'.*': {'$ref': '#/definitions/MachineStatus'}},
'type': 'object'},
'model': {'$ref': '#/definitions/ModelStatusInfo'},
'offers': {'patternProperties': {'.*': {'$ref': '#/definitions/ApplicationOfferStatus'}},
'type': 'object'},
'relations': {'items': {'$ref': '#/definitions/RelationStatus'},
'type': 'array'},
'remote-applications': {'patternProperties': {'.*': {'$ref': '#/definitions/RemoteApplicationStatus'}},
'type': 'object'}},
'required': ['model',
'machines',
'applications',
'remote-applications',
'offers',
'relations',
'controller-timestamp',
'branches'],
'type': 'object'},
'GetConstraintsResults': {'additionalProperties': False,
'properties': {'constraints': {'$ref': '#/definitions/Value'}},
'required': ['constraints'],
'type': 'object'},
'HardwareCharacteristics': {'additionalProperties': False,
'properties': {'arch': {'type': 'string'},
'availability-zone': {'type': 'string'},
'cpu-cores': {'type': 'integer'},
'cpu-power': {'type': 'integer'},
'mem': {'type': 'integer'},
'root-disk': {'type': 'integer'},
'root-disk-source': {'type': 'string'},
'tags': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'},
'History': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'statuses': {'items': {'$ref': '#/definitions/DetailedStatus'},
'type': 'array'}},
'required': ['statuses'],
'type': 'object'},
'HostPort': {'additionalProperties': False,
'properties': {'Address': {'$ref': '#/definitions/Address'},
'port': {'type': 'integer'},
'scope': {'type': 'string'},
'space-id': {'type': 'string'},
'space-name': {'type': 'string'},
'type': {'type': 'string'},
'value': {'type': 'string'}},
'required': ['value',
'type',
'scope',
'Address',
'port'],
'type': 'object'},
'LXDProfile': {'additionalProperties': False,
'properties': {'config': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'description': {'type': 'string'},
'devices': {'patternProperties': {'.*': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'}},
'type': 'object'}},
'required': ['config',
'description',
'devices'],
'type': 'object'},
'Macaroon': {'additionalProperties': False, 'type': 'object'},
'MachineHardware': {'additionalProperties': False,
'properties': {'arch': {'type': 'string'},
'availability-zone': {'type': 'string'},
'cores': {'type': 'integer'},
'cpu-power': {'type': 'integer'},
'mem': {'type': 'integer'},
'root-disk': {'type': 'integer'},
'tags': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'},
'MachineStatus': {'additionalProperties': False,
'properties': {'agent-status': {'$ref': '#/definitions/DetailedStatus'},
'constraints': {'type': 'string'},
'containers': {'patternProperties': {'.*': {'$ref': '#/definitions/MachineStatus'}},
'type': 'object'},
'display-name': {'type': 'string'},
'dns-name': {'type': 'string'},
'hardware': {'type': 'string'},
'has-vote': {'type': 'boolean'},
'id': {'type': 'string'},
'instance-id': {'type': 'string'},
'instance-status': {'$ref': '#/definitions/DetailedStatus'},
'ip-addresses': {'items': {'type': 'string'},
'type': 'array'},
'jobs': {'items': {'type': 'string'},
'type': 'array'},
'lxd-profiles': {'patternProperties': {'.*': {'$ref': '#/definitions/LXDProfile'}},
'type': 'object'},
'modification-status': {'$ref': '#/definitions/DetailedStatus'},
'network-interfaces': {'patternProperties': {'.*': {'$ref': '#/definitions/NetworkInterface'}},
'type': 'object'},
'primary-controller-machine': {'type': 'boolean'},
'series': {'type': 'string'},
'wants-vote': {'type': 'boolean'}},
'required': ['agent-status',
'instance-status',
'modification-status',
'dns-name',
'instance-id',
'display-name',
'series',
'id',
'containers',
'constraints',
'hardware',
'jobs',
'has-vote',
'wants-vote'],
'type': 'object'},
'MeterStatus': {'additionalProperties': False,
'properties': {'color': {'type': 'string'},
'message': {'type': 'string'}},
'required': ['color', 'message'],
'type': 'object'},
'ModelConfigResults': {'additionalProperties': False,
'properties': {'config': {'patternProperties': {'.*': {'$ref': '#/definitions/ConfigValue'}},
'type': 'object'}},
'required': ['config'],
'type': 'object'},
'ModelInfo': {'additionalProperties': False,
'properties': {'agent-version': {'$ref': '#/definitions/Number'},
'cloud-credential-tag': {'type': 'string'},
'cloud-credential-validity': {'type': 'boolean'},
'cloud-region': {'type': 'string'},
'cloud-tag': {'type': 'string'},
'controller-uuid': {'type': 'string'},
'default-series': {'type': 'string'},
'is-controller': {'type': 'boolean'},
'life': {'type': 'string'},
'machines': {'items': {'$ref': '#/definitions/ModelMachineInfo'},
'type': 'array'},
'migration': {'$ref': '#/definitions/ModelMigrationStatus'},
'name': {'type': 'string'},
'owner-tag': {'type': 'string'},
'provider-type': {'type': 'string'},
'sla': {'$ref': '#/definitions/ModelSLAInfo'},
'status': {'$ref': '#/definitions/EntityStatus'},
'type': {'type': 'string'},
'users': {'items': {'$ref': '#/definitions/ModelUserInfo'},
'type': 'array'},
'uuid': {'type': 'string'}},
'required': ['name',
'type',
'uuid',
'controller-uuid',
'is-controller',
'cloud-tag',
'owner-tag',
'life',
'users',
'machines',
'sla',
'agent-version'],
'type': 'object'},
'ModelMachineInfo': {'additionalProperties': False,
'properties': {'display-name': {'type': 'string'},
'ha-primary': {'type': 'boolean'},
'hardware': {'$ref': '#/definitions/MachineHardware'},
'has-vote': {'type': 'boolean'},
'id': {'type': 'string'},
'instance-id': {'type': 'string'},
'message': {'type': 'string'},
'status': {'type': 'string'},
'wants-vote': {'type': 'boolean'}},
'required': ['id'],
'type': 'object'},
'ModelMigrationStatus': {'additionalProperties': False,
'properties': {'end': {'format': 'date-time',
'type': 'string'},
'start': {'format': 'date-time',
'type': 'string'},
'status': {'type': 'string'}},
'required': ['status', 'start'],
'type': 'object'},
'ModelSLA': {'additionalProperties': False,
'properties': {'ModelSLAInfo': {'$ref': '#/definitions/ModelSLAInfo'},
'creds': {'items': {'type': 'integer'},
'type': 'array'},
'level': {'type': 'string'},
'owner': {'type': 'string'}},
'required': ['level',
'owner',
'ModelSLAInfo',
'creds'],
'type': 'object'},
'ModelSLAInfo': {'additionalProperties': False,
'properties': {'level': {'type': 'string'},
'owner': {'type': 'string'}},
'required': ['level', 'owner'],
'type': 'object'},
'ModelSet': {'additionalProperties': False,
'properties': {'config': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'}},
'required': ['config'],
'type': 'object'},
'ModelStatusInfo': {'additionalProperties': False,
'properties': {'available-version': {'type': 'string'},
'cloud-tag': {'type': 'string'},
'meter-status': {'$ref': '#/definitions/MeterStatus'},
'model-status': {'$ref': '#/definitions/DetailedStatus'},
'name': {'type': 'string'},
'region': {'type': 'string'},
'sla': {'type': 'string'},
'type': {'type': 'string'},
'version': {'type': 'string'}},
'required': ['name',
'type',
'cloud-tag',
'version',
'available-version',
'model-status',
'meter-status',
'sla'],
'type': 'object'},
'ModelUnset': {'additionalProperties': False,
'properties': {'keys': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['keys'],
'type': 'object'},
'ModelUserInfo': {'additionalProperties': False,
'properties': {'access': {'type': 'string'},
'display-name': {'type': 'string'},
'last-connection': {'format': 'date-time',
'type': 'string'},
'user': {'type': 'string'}},
'required': ['user',
'display-name',
'last-connection',
'access'],
'type': 'object'},
'ModelUserInfoResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'$ref': '#/definitions/ModelUserInfo'}},
'type': 'object'},
'ModelUserInfoResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ModelUserInfoResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'NetworkInterface': {'additionalProperties': False,
'properties': {'dns-nameservers': {'items': {'type': 'string'},
'type': 'array'},
'gateway': {'type': 'string'},
'ip-addresses': {'items': {'type': 'string'},
'type': 'array'},
'is-up': {'type': 'boolean'},
'mac-address': {'type': 'string'},
'space': {'type': 'string'}},
'required': ['ip-addresses',
'mac-address',
'is-up'],
'type': 'object'},
'Number': {'additionalProperties': False,
'properties': {'Build': {'type': 'integer'},
'Major': {'type': 'integer'},
'Minor': {'type': 'integer'},
'Patch': {'type': 'integer'},
'Tag': {'type': 'string'}},
'required': ['Major',
'Minor',
'Tag',
'Patch',
'Build'],
'type': 'object'},
'Placement': {'additionalProperties': False,
'properties': {'directive': {'type': 'string'},
'scope': {'type': 'string'}},
'required': ['scope', 'directive'],
'type': 'object'},
'PrivateAddress': {'additionalProperties': False,
'properties': {'target': {'type': 'string'}},
'required': ['target'],
'type': 'object'},
'PrivateAddressResults': {'additionalProperties': False,
'properties': {'private-address': {'type': 'string'}},
'required': ['private-address'],
'type': 'object'},
'ProvisioningScriptParams': {'additionalProperties': False,
'properties': {'data-dir': {'type': 'string'},
'disable-package-commands': {'type': 'boolean'},
'machine-id': {'type': 'string'},
'nonce': {'type': 'string'}},
'required': ['machine-id',
'nonce',
'data-dir',
'disable-package-commands'],
'type': 'object'},
'ProvisioningScriptResult': {'additionalProperties': False,
'properties': {'script': {'type': 'string'}},
'required': ['script'],
'type': 'object'},
'PublicAddress': {'additionalProperties': False,
'properties': {'target': {'type': 'string'}},
'required': ['target'],
'type': 'object'},
'PublicAddressResults': {'additionalProperties': False,
'properties': {'public-address': {'type': 'string'}},
'required': ['public-address'],
'type': 'object'},
'RelationStatus': {'additionalProperties': False,
'properties': {'endpoints': {'items': {'$ref': '#/definitions/EndpointStatus'},
'type': 'array'},
'id': {'type': 'integer'},
'interface': {'type': 'string'},
'key': {'type': 'string'},
'scope': {'type': 'string'},
'status': {'$ref': '#/definitions/DetailedStatus'}},
'required': ['id',
'key',
'interface',
'scope',
'endpoints',
'status'],
'type': 'object'},
'RemoteApplicationStatus': {'additionalProperties': False,
'properties': {'endpoints': {'items': {'$ref': '#/definitions/RemoteEndpoint'},
'type': 'array'},
'err': {'$ref': '#/definitions/Error'},
'life': {'type': 'string'},
'offer-name': {'type': 'string'},
'offer-url': {'type': 'string'},
'relations': {'patternProperties': {'.*': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'},
'status': {'$ref': '#/definitions/DetailedStatus'}},
'required': ['offer-url',
'offer-name',
'endpoints',
'life',
'relations',
'status'],
'type': 'object'},
'RemoteEndpoint': {'additionalProperties': False,
'properties': {'interface': {'type': 'string'},
'limit': {'type': 'integer'},
'name': {'type': 'string'},
'role': {'type': 'string'}},
'required': ['name',
'role',
'interface',
'limit'],
'type': 'object'},
'ResolveCharmResult': {'additionalProperties': False,
'properties': {'error': {'type': 'string'},
'url': {'type': 'string'}},
'type': 'object'},
'ResolveCharmResults': {'additionalProperties': False,
'properties': {'urls': {'items': {'$ref': '#/definitions/ResolveCharmResult'},
'type': 'array'}},
'required': ['urls'],
'type': 'object'},
'ResolveCharms': {'additionalProperties': False,
'properties': {'references': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['references'],
'type': 'object'},
'Resolved': {'additionalProperties': False,
'properties': {'retry': {'type': 'boolean'},
'unit-name': {'type': 'string'}},
'required': ['unit-name', 'retry'],
'type': 'object'},
'SetConstraints': {'additionalProperties': False,
'properties': {'application': {'type': 'string'},
'constraints': {'$ref': '#/definitions/Value'}},
'required': ['application', 'constraints'],
'type': 'object'},
'SetModelAgentVersion': {'additionalProperties': False,
'properties': {'force': {'type': 'boolean'},
'version': {'$ref': '#/definitions/Number'}},
'required': ['version'],
'type': 'object'},
'StatusHistoryFilter': {'additionalProperties': False,
'properties': {'date': {'format': 'date-time',
'type': 'string'},
'delta': {'type': 'integer'},
'exclude': {'items': {'type': 'string'},
'type': 'array'},
'size': {'type': 'integer'}},
'required': ['size',
'date',
'delta',
'exclude'],
'type': 'object'},
'StatusHistoryRequest': {'additionalProperties': False,
'properties': {'filter': {'$ref': '#/definitions/StatusHistoryFilter'},
'historyKind': {'type': 'string'},
'size': {'type': 'integer'},
'tag': {'type': 'string'}},
'required': ['historyKind',
'size',
'filter',
'tag'],
'type': 'object'},
'StatusHistoryRequests': {'additionalProperties': False,
'properties': {'requests': {'items': {'$ref': '#/definitions/StatusHistoryRequest'},
'type': 'array'}},
'required': ['requests'],
'type': 'object'},
'StatusHistoryResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'history': {'$ref': '#/definitions/History'}},
'required': ['history'],
'type': 'object'},
'StatusHistoryResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/StatusHistoryResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'StatusParams': {'additionalProperties': False,
'properties': {'patterns': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['patterns'],
'type': 'object'},
'StringResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'type': 'string'}},
'required': ['result'],
'type': 'object'},
'Tools': {'additionalProperties': False,
'properties': {'sha256': {'type': 'string'},
'size': {'type': 'integer'},
'url': {'type': 'string'},
'version': {'$ref': '#/definitions/Binary'}},
'required': ['version', 'url', 'size'],
'type': 'object'},
'UnitStatus': {'additionalProperties': False,
'properties': {'address': {'type': 'string'},
'agent-status': {'$ref': '#/definitions/DetailedStatus'},
'charm': {'type': 'string'},
'leader': {'type': 'boolean'},
'machine': {'type': 'string'},
'opened-ports': {'items': {'type': 'string'},
'type': 'array'},
'provider-id': {'type': 'string'},
'public-address': {'type': 'string'},
'subordinates': {'patternProperties': {'.*': {'$ref': '#/definitions/UnitStatus'}},
'type': 'object'},
'workload-status': {'$ref': '#/definitions/DetailedStatus'},
'workload-version': {'type': 'string'}},
'required': ['agent-status',
'workload-status',
'workload-version',
'machine',
'opened-ports',
'public-address',
'charm',
'subordinates'],
'type': 'object'},
'Value': {'additionalProperties': False,
'properties': {'arch': {'type': 'string'},
'container': {'type': 'string'},
'cores': {'type': 'integer'},
'cpu-power': {'type': 'integer'},
'instance-type': {'type': 'string'},
'mem': {'type': 'integer'},
'root-disk': {'type': 'integer'},
'root-disk-source': {'type': 'string'},
'spaces': {'items': {'type': 'string'},
'type': 'array'},
'tags': {'items': {'type': 'string'},
'type': 'array'},
'virt-type': {'type': 'string'},
'zones': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'}},
'properties': {'APIHostPorts': {'properties': {'Result': {'$ref': '#/definitions/APIHostPortsResult'}},
'type': 'object'},
'AbortCurrentUpgrade': {'type': 'object'},
'AddCharm': {'properties': {'Params': {'$ref': '#/definitions/AddCharm'}},
'type': 'object'},
'AddCharmWithAuthorization': {'properties': {'Params': {'$ref': '#/definitions/AddCharmWithAuthorization'}},
'type': 'object'},
'AddMachines': {'properties': {'Params': {'$ref': '#/definitions/AddMachines'},
'Result': {'$ref': '#/definitions/AddMachinesResults'}},
'type': 'object'},
'AddMachinesV2': {'properties': {'Params': {'$ref': '#/definitions/AddMachines'},
'Result': {'$ref': '#/definitions/AddMachinesResults'}},
'type': 'object'},
'AgentVersion': {'properties': {'Result': {'$ref': '#/definitions/AgentVersionResult'}},
'type': 'object'},
'CACert': {'properties': {'Result': {'$ref': '#/definitions/BytesResult'}},
'type': 'object'},
'DestroyMachines': {'properties': {'Params': {'$ref': '#/definitions/DestroyMachines'}},
'type': 'object'},
'FindTools': {'properties': {'Params': {'$ref': '#/definitions/FindToolsParams'},
'Result': {'$ref': '#/definitions/FindToolsResult'}},
'type': 'object'},
'FullStatus': {'properties': {'Params': {'$ref': '#/definitions/StatusParams'},
'Result': {'$ref': '#/definitions/FullStatus'}},
'type': 'object'},
'GetBundleChanges': {'properties': {'Params': {'$ref': '#/definitions/BundleChangesParams'},
'Result': {'$ref': '#/definitions/BundleChangesResults'}},
'type': 'object'},
'GetModelConstraints': {'properties': {'Result': {'$ref': '#/definitions/GetConstraintsResults'}},
'type': 'object'},
'InjectMachines': {'properties': {'Params': {'$ref': '#/definitions/AddMachines'},
'Result': {'$ref': '#/definitions/AddMachinesResults'}},
'type': 'object'},
'ModelGet': {'properties': {'Result': {'$ref': '#/definitions/ModelConfigResults'}},
'type': 'object'},
'ModelInfo': {'properties': {'Result': {'$ref': '#/definitions/ModelInfo'}},
'type': 'object'},
'ModelSet': {'properties': {'Params': {'$ref': '#/definitions/ModelSet'}},
'type': 'object'},
'ModelUnset': {'properties': {'Params': {'$ref': '#/definitions/ModelUnset'}},
'type': 'object'},
'ModelUserInfo': {'properties': {'Result': {'$ref': '#/definitions/ModelUserInfoResults'}},
'type': 'object'},
'PrivateAddress': {'properties': {'Params': {'$ref': '#/definitions/PrivateAddress'},
'Result': {'$ref': '#/definitions/PrivateAddressResults'}},
'type': 'object'},
'ProvisioningScript': {'properties': {'Params': {'$ref': '#/definitions/ProvisioningScriptParams'},
'Result': {'$ref': '#/definitions/ProvisioningScriptResult'}},
'type': 'object'},
'PublicAddress': {'properties': {'Params': {'$ref': '#/definitions/PublicAddress'},
'Result': {'$ref': '#/definitions/PublicAddressResults'}},
'type': 'object'},
'ResolveCharms': {'properties': {'Params': {'$ref': '#/definitions/ResolveCharms'},
'Result': {'$ref': '#/definitions/ResolveCharmResults'}},
'type': 'object'},
'Resolved': {'properties': {'Params': {'$ref': '#/definitions/Resolved'}},
'type': 'object'},
'RetryProvisioning': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'SLALevel': {'properties': {'Result': {'$ref': '#/definitions/StringResult'}},
'type': 'object'},
'SetModelAgentVersion': {'properties': {'Params': {'$ref': '#/definitions/SetModelAgentVersion'}},
'type': 'object'},
'SetModelConstraints': {'properties': {'Params': {'$ref': '#/definitions/SetConstraints'}},
'type': 'object'},
'SetSLALevel': {'properties': {'Params': {'$ref': '#/definitions/ModelSLA'}},
'type': 'object'},
'StatusHistory': {'properties': {'Params': {'$ref': '#/definitions/StatusHistoryRequests'},
'Result': {'$ref': '#/definitions/StatusHistoryResults'}},
'type': 'object'},
'WatchAll': {'properties': {'Result': {'$ref': '#/definitions/AllWatcherId'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(APIHostPortsResult)
async def APIHostPorts(self):
'''
Returns -> typing.Sequence[~HostPort]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='APIHostPorts',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def AbortCurrentUpgrade(self):
'''
Returns -> None
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='AbortCurrentUpgrade',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def AddCharm(self, channel=None, force=None, url=None):
'''
channel : str
force : bool
url : str
Returns -> None
'''
if channel is not None and not isinstance(channel, (bytes, str)):
raise Exception("Expected channel to be a str, received: {}".format(type(channel)))
if force is not None and not isinstance(force, bool):
raise Exception("Expected force to be a bool, received: {}".format(type(force)))
if url is not None and not isinstance(url, (bytes, str)):
raise Exception("Expected url to be a str, received: {}".format(type(url)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='AddCharm',
version=2,
params=_params)
_params['channel'] = channel
_params['force'] = force
_params['url'] = url
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def AddCharmWithAuthorization(self, channel=None, force=None, macaroon=None, url=None):
'''
channel : str
force : bool
macaroon : Macaroon
url : str
Returns -> None
'''
if channel is not None and not isinstance(channel, (bytes, str)):
raise Exception("Expected channel to be a str, received: {}".format(type(channel)))
if force is not None and not isinstance(force, bool):
raise Exception("Expected force to be a bool, received: {}".format(type(force)))
if macaroon is not None and not isinstance(macaroon, (dict, Macaroon)):
raise Exception("Expected macaroon to be a Macaroon, received: {}".format(type(macaroon)))
if url is not None and not isinstance(url, (bytes, str)):
raise Exception("Expected url to be a str, received: {}".format(type(url)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='AddCharmWithAuthorization',
version=2,
params=_params)
_params['channel'] = channel
_params['force'] = force
_params['macaroon'] = macaroon
_params['url'] = url
reply = await self.rpc(msg)
return reply
@ReturnMapping(AddMachinesResults)
async def AddMachines(self, params=None):
'''
params : typing.Sequence[~AddMachineParams]
Returns -> typing.Sequence[~AddMachinesResult]
'''
if params is not None and not isinstance(params, (bytes, str, list)):
raise Exception("Expected params to be a Sequence, received: {}".format(type(params)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='AddMachines',
version=2,
params=_params)
_params['params'] = params
reply = await self.rpc(msg)
return reply
@ReturnMapping(AddMachinesResults)
async def AddMachinesV2(self, params=None):
'''
params : typing.Sequence[~AddMachineParams]
Returns -> typing.Sequence[~AddMachinesResult]
'''
if params is not None and not isinstance(params, (bytes, str, list)):
raise Exception("Expected params to be a Sequence, received: {}".format(type(params)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='AddMachinesV2',
version=2,
params=_params)
_params['params'] = params
reply = await self.rpc(msg)
return reply
@ReturnMapping(AgentVersionResult)
async def AgentVersion(self):
'''
Returns -> Number
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='AgentVersion',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(BytesResult)
async def CACert(self):
'''
Returns -> typing.Sequence[int]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='CACert',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def DestroyMachines(self, force=None, machine_names=None):
'''
force : bool
machine_names : typing.Sequence[str]
Returns -> None
'''
if force is not None and not isinstance(force, bool):
raise Exception("Expected force to be a bool, received: {}".format(type(force)))
if machine_names is not None and not isinstance(machine_names, (bytes, str, list)):
raise Exception("Expected machine_names to be a Sequence, received: {}".format(type(machine_names)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='DestroyMachines',
version=2,
params=_params)
_params['force'] = force
_params['machine-names'] = machine_names
reply = await self.rpc(msg)
return reply
@ReturnMapping(FindToolsResult)
async def FindTools(self, agentstream=None, arch=None, major=None, minor=None, number=None, series=None):
'''
agentstream : str
arch : str
major : int
minor : int
number : Number
series : str
Returns -> typing.Union[_ForwardRef('Error'), typing.Sequence[~Tools]]
'''
if agentstream is not None and not isinstance(agentstream, (bytes, str)):
raise Exception("Expected agentstream to be a str, received: {}".format(type(agentstream)))
if arch is not None and not isinstance(arch, (bytes, str)):
raise Exception("Expected arch to be a str, received: {}".format(type(arch)))
if major is not None and not isinstance(major, int):
raise Exception("Expected major to be a int, received: {}".format(type(major)))
if minor is not None and not isinstance(minor, int):
raise Exception("Expected minor to be a int, received: {}".format(type(minor)))
if number is not None and not isinstance(number, (dict, Number)):
raise Exception("Expected number to be a Number, received: {}".format(type(number)))
if series is not None and not isinstance(series, (bytes, str)):
raise Exception("Expected series to be a str, received: {}".format(type(series)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='FindTools',
version=2,
params=_params)
_params['agentstream'] = agentstream
_params['arch'] = arch
_params['major'] = major
_params['minor'] = minor
_params['number'] = number
_params['series'] = series
reply = await self.rpc(msg)
return reply
@ReturnMapping(FullStatus)
async def FullStatus(self, patterns=None):
'''
patterns : typing.Sequence[str]
Returns -> typing.Union[typing.Mapping[str, ~ApplicationStatus], typing.Mapping[str, ~BranchStatus], str, typing.Mapping[str, ~MachineStatus], _ForwardRef('ModelStatusInfo'), typing.Mapping[str, ~ApplicationOfferStatus], typing.Sequence[~RelationStatus], typing.Mapping[str, ~RemoteApplicationStatus]]
'''
if patterns is not None and not isinstance(patterns, (bytes, str, list)):
raise Exception("Expected patterns to be a Sequence, received: {}".format(type(patterns)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='FullStatus',
version=2,
params=_params)
_params['patterns'] = patterns
reply = await self.rpc(msg)
return reply
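    # Comment-only usage note (hand-written, kept as a comment so the
    # generated class body stays intact): patterns filter the returned
    # status, e.g.
    #   await client.FullStatus(patterns=['mysql/*'])
    # narrows the result to matching units/applications, while an empty
    # list returns the full model status.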
@ReturnMapping(BundleChangesResults)
async def GetBundleChanges(self, bundleurl=None, yaml=None):
'''
bundleurl : str
yaml : str
Returns -> typing.Union[typing.Sequence[~BundleChange], typing.Sequence[str]]
'''
if bundleurl is not None and not isinstance(bundleurl, (bytes, str)):
raise Exception("Expected bundleurl to be a str, received: {}".format(type(bundleurl)))
if yaml is not None and not isinstance(yaml, (bytes, str)):
raise Exception("Expected yaml to be a str, received: {}".format(type(yaml)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='GetBundleChanges',
version=2,
params=_params)
_params['bundleURL'] = bundleurl
_params['yaml'] = yaml
reply = await self.rpc(msg)
return reply
@ReturnMapping(GetConstraintsResults)
async def GetModelConstraints(self):
'''
Returns -> Value
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='GetModelConstraints',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(AddMachinesResults)
async def InjectMachines(self, params=None):
'''
params : typing.Sequence[~AddMachineParams]
Returns -> typing.Sequence[~AddMachinesResult]
'''
if params is not None and not isinstance(params, (bytes, str, list)):
raise Exception("Expected params to be a Sequence, received: {}".format(type(params)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='InjectMachines',
version=2,
params=_params)
_params['params'] = params
reply = await self.rpc(msg)
return reply
@ReturnMapping(ModelConfigResults)
async def ModelGet(self):
'''
Returns -> typing.Mapping[str, ~ConfigValue]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='ModelGet',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(ModelInfo)
async def ModelInfo(self):
'''
Returns -> typing.Union[_ForwardRef('Number'), str, bool, typing.Sequence[~ModelMachineInfo], _ForwardRef('ModelMigrationStatus'), _ForwardRef('ModelSLAInfo'), _ForwardRef('EntityStatus'), typing.Sequence[~ModelUserInfo]]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='ModelInfo',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def ModelSet(self, config=None):
'''
config : typing.Mapping[str, typing.Any]
Returns -> None
'''
if config is not None and not isinstance(config, dict):
raise Exception("Expected config to be a Mapping, received: {}".format(type(config)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='ModelSet',
version=2,
params=_params)
_params['config'] = config
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def ModelUnset(self, keys=None):
'''
keys : typing.Sequence[str]
Returns -> None
'''
if keys is not None and not isinstance(keys, (bytes, str, list)):
raise Exception("Expected keys to be a Sequence, received: {}".format(type(keys)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='ModelUnset',
version=2,
params=_params)
_params['keys'] = keys
reply = await self.rpc(msg)
return reply
@ReturnMapping(ModelUserInfoResults)
async def ModelUserInfo(self):
'''
Returns -> typing.Sequence[~ModelUserInfoResult]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='ModelUserInfo',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(PrivateAddressResults)
async def PrivateAddress(self, target=None):
'''
target : str
Returns -> str
'''
if target is not None and not isinstance(target, (bytes, str)):
raise Exception("Expected target to be a str, received: {}".format(type(target)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='PrivateAddress',
version=2,
params=_params)
_params['target'] = target
reply = await self.rpc(msg)
return reply
@ReturnMapping(ProvisioningScriptResult)
async def ProvisioningScript(self, data_dir=None, disable_package_commands=None, machine_id=None, nonce=None):
'''
data_dir : str
disable_package_commands : bool
machine_id : str
nonce : str
Returns -> str
'''
if data_dir is not None and not isinstance(data_dir, (bytes, str)):
raise Exception("Expected data_dir to be a str, received: {}".format(type(data_dir)))
if disable_package_commands is not None and not isinstance(disable_package_commands, bool):
raise Exception("Expected disable_package_commands to be a bool, received: {}".format(type(disable_package_commands)))
if machine_id is not None and not isinstance(machine_id, (bytes, str)):
raise Exception("Expected machine_id to be a str, received: {}".format(type(machine_id)))
if nonce is not None and not isinstance(nonce, (bytes, str)):
raise Exception("Expected nonce to be a str, received: {}".format(type(nonce)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='ProvisioningScript',
version=2,
params=_params)
_params['data-dir'] = data_dir
_params['disable-package-commands'] = disable_package_commands
_params['machine-id'] = machine_id
_params['nonce'] = nonce
reply = await self.rpc(msg)
return reply
@ReturnMapping(PublicAddressResults)
async def PublicAddress(self, target=None):
'''
target : str
Returns -> str
'''
if target is not None and not isinstance(target, (bytes, str)):
raise Exception("Expected target to be a str, received: {}".format(type(target)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='PublicAddress',
version=2,
params=_params)
_params['target'] = target
reply = await self.rpc(msg)
return reply
@ReturnMapping(ResolveCharmResults)
async def ResolveCharms(self, references=None):
'''
references : typing.Sequence[str]
Returns -> typing.Sequence[~ResolveCharmResult]
'''
if references is not None and not isinstance(references, (bytes, str, list)):
raise Exception("Expected references to be a Sequence, received: {}".format(type(references)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='ResolveCharms',
version=2,
params=_params)
_params['references'] = references
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Resolved(self, retry=None, unit_name=None):
'''
retry : bool
unit_name : str
Returns -> None
'''
if retry is not None and not isinstance(retry, bool):
raise Exception("Expected retry to be a bool, received: {}".format(type(retry)))
if unit_name is not None and not isinstance(unit_name, (bytes, str)):
raise Exception("Expected unit_name to be a str, received: {}".format(type(unit_name)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='Resolved',
version=2,
params=_params)
_params['retry'] = retry
_params['unit-name'] = unit_name
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def RetryProvisioning(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='RetryProvisioning',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(StringResult)
async def SLALevel(self):
'''
Returns -> typing.Union[_ForwardRef('Error'), str]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='SLALevel',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def SetModelAgentVersion(self, force=None, version=None):
'''
force : bool
version : Number
Returns -> None
'''
if force is not None and not isinstance(force, bool):
raise Exception("Expected force to be a bool, received: {}".format(type(force)))
if version is not None and not isinstance(version, (dict, Number)):
raise Exception("Expected version to be a Number, received: {}".format(type(version)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='SetModelAgentVersion',
version=2,
params=_params)
_params['force'] = force
_params['version'] = version
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def SetModelConstraints(self, application=None, constraints=None):
'''
application : str
constraints : Value
Returns -> None
'''
if application is not None and not isinstance(application, (bytes, str)):
raise Exception("Expected application to be a str, received: {}".format(type(application)))
if constraints is not None and not isinstance(constraints, (dict, Value)):
raise Exception("Expected constraints to be a Value, received: {}".format(type(constraints)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='SetModelConstraints',
version=2,
params=_params)
_params['application'] = application
_params['constraints'] = constraints
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def SetSLALevel(self, modelslainfo=None, creds=None, level=None, owner=None):
'''
modelslainfo : ModelSLAInfo
creds : typing.Sequence[int]
level : str
owner : str
Returns -> None
'''
if modelslainfo is not None and not isinstance(modelslainfo, (dict, ModelSLAInfo)):
raise Exception("Expected modelslainfo to be a ModelSLAInfo, received: {}".format(type(modelslainfo)))
if creds is not None and not isinstance(creds, (bytes, str, list)):
raise Exception("Expected creds to be a Sequence, received: {}".format(type(creds)))
if level is not None and not isinstance(level, (bytes, str)):
raise Exception("Expected level to be a str, received: {}".format(type(level)))
if owner is not None and not isinstance(owner, (bytes, str)):
raise Exception("Expected owner to be a str, received: {}".format(type(owner)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='SetSLALevel',
version=2,
params=_params)
_params['ModelSLAInfo'] = modelslainfo
_params['creds'] = creds
_params['level'] = level
_params['owner'] = owner
reply = await self.rpc(msg)
return reply
@ReturnMapping(StatusHistoryResults)
async def StatusHistory(self, requests=None):
'''
requests : typing.Sequence[~StatusHistoryRequest]
Returns -> typing.Sequence[~StatusHistoryResult]
'''
if requests is not None and not isinstance(requests, (bytes, str, list)):
raise Exception("Expected requests to be a Sequence, received: {}".format(type(requests)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='StatusHistory',
version=2,
params=_params)
_params['requests'] = requests
reply = await self.rpc(msg)
return reply
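    # Comment-only usage note (hand-written): a StatusHistoryRequest pairs
    # an entity tag with a history kind and a filter, e.g.
    #   await client.StatusHistory(requests=[{
    #       'tag': 'unit-mysql-0',
    #       'historyKind': 'unit',
    #       'size': 0,
    #       'filter': {'size': 20, 'date': None, 'delta': None,
    #                  'exclude': []}}])
    # Note the StatusHistoryFilter schema above marks all four of its
    # fields as required; the values shown are placeholders.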
@ReturnMapping(AllWatcherId)
async def WatchAll(self):
'''
Returns -> str
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Client',
request='WatchAll',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
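
# --- Illustrative usage (hand-written sketch, not generated) ---------------
# A minimal sketch of driving the version-2 ClientFacade above. It assumes
# an already-established, logged-in `connection` object (as produced by
# juju.client.connection); everything here is illustrative, not a required
# part of the generated API.
async def _example_client_facade(connection):
    # Bind the facade to the open websocket connection.
    client = ClientFacade.from_connection(connection)
    # An empty pattern list asks FullStatus for everything in the model.
    status = await client.FullStatus(patterns=[])
    # The agent version the controller is running.
    version = await client.AgentVersion()
    return status, version
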
class CredentialValidatorFacade(Type):
name = 'CredentialValidator'
version = 2
schema = {'definitions': {'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'InvalidateCredentialArg': {'additionalProperties': False,
'properties': {'reason': {'type': 'string'}},
'type': 'object'},
'ModelCredential': {'additionalProperties': False,
'properties': {'credential-tag': {'type': 'string'},
'exists': {'type': 'boolean'},
'model-tag': {'type': 'string'},
'valid': {'type': 'boolean'}},
'required': ['model-tag',
'credential-tag'],
'type': 'object'},
'NotifyWatchResult': {'additionalProperties': False,
'properties': {'NotifyWatcherId': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'required': ['NotifyWatcherId'],
'type': 'object'}},
'properties': {'InvalidateModelCredential': {'properties': {'Params': {'$ref': '#/definitions/InvalidateCredentialArg'},
'Result': {'$ref': '#/definitions/ErrorResult'}},
'type': 'object'},
'ModelCredential': {'properties': {'Result': {'$ref': '#/definitions/ModelCredential'}},
'type': 'object'},
'WatchCredential': {'properties': {'Params': {'$ref': '#/definitions/Entity'},
'Result': {'$ref': '#/definitions/NotifyWatchResult'}},
'type': 'object'},
'WatchModelCredential': {'properties': {'Result': {'$ref': '#/definitions/NotifyWatchResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResult)
async def InvalidateModelCredential(self, reason=None):
'''
reason : str
Returns -> Error
'''
if reason is not None and not isinstance(reason, (bytes, str)):
raise Exception("Expected reason to be a str, received: {}".format(type(reason)))
# map input types to rpc msg
_params = dict()
msg = dict(type='CredentialValidator',
request='InvalidateModelCredential',
version=2,
params=_params)
_params['reason'] = reason
reply = await self.rpc(msg)
return reply
@ReturnMapping(ModelCredential)
async def ModelCredential(self):
'''
Returns -> typing.Union[str, bool]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='CredentialValidator',
request='ModelCredential',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResult)
async def WatchCredential(self, tag=None):
'''
tag : str
Returns -> typing.Union[str, _ForwardRef('Error')]
'''
if tag is not None and not isinstance(tag, (bytes, str)):
raise Exception("Expected tag to be a str, received: {}".format(type(tag)))
# map input types to rpc msg
_params = dict()
msg = dict(type='CredentialValidator',
request='WatchCredential',
version=2,
params=_params)
_params['tag'] = tag
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResult)
async def WatchModelCredential(self):
'''
Returns -> typing.Union[str, _ForwardRef('Error')]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='CredentialValidator',
request='WatchModelCredential',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
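
# Hand-written sketch, not generated: querying and invalidating the model
# credential. Assumes a logged-in `connection`; the reason string below is
# a placeholder.
async def _example_credential_validator(connection):
    validator = CredentialValidatorFacade.from_connection(connection)
    # Which credential backs this model, and is it still considered valid?
    cred = await validator.ModelCredential()
    # Flag the credential as invalid, recording a reason for operators.
    await validator.InvalidateModelCredential(reason='credential revoked upstream')
    return cred
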
class CrossModelRelationsFacade(Type):
name = 'CrossModelRelations'
version = 2
schema = {'definitions': {'EntityStatus': {'additionalProperties': False,
'properties': {'data': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'info': {'type': 'string'},
'since': {'format': 'date-time',
'type': 'string'},
'status': {'type': 'string'}},
'required': ['status', 'info', 'since'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'IngressNetworksChangeEvent': {'additionalProperties': False,
'properties': {'application-token': {'type': 'string'},
'bakery-version': {'type': 'integer'},
'ingress-required': {'type': 'boolean'},
'macaroons': {'items': {'$ref': '#/definitions/Macaroon'},
'type': 'array'},
'networks': {'items': {'type': 'string'},
'type': 'array'},
'relation-token': {'type': 'string'}},
'required': ['relation-token',
'application-token',
'ingress-required'],
'type': 'object'},
'IngressNetworksChanges': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/IngressNetworksChangeEvent'},
'type': 'array'}},
'type': 'object'},
'Macaroon': {'additionalProperties': False, 'type': 'object'},
'OfferArg': {'additionalProperties': False,
'properties': {'bakery-version': {'type': 'integer'},
'macaroons': {'items': {'$ref': '#/definitions/Macaroon'},
'type': 'array'},
'offer-uuid': {'type': 'string'}},
'required': ['offer-uuid'],
'type': 'object'},
'OfferArgs': {'additionalProperties': False,
'properties': {'args': {'items': {'$ref': '#/definitions/OfferArg'},
'type': 'array'}},
'required': ['args'],
'type': 'object'},
'OfferStatusChange': {'additionalProperties': False,
'properties': {'offer-name': {'type': 'string'},
'status': {'$ref': '#/definitions/EntityStatus'}},
'required': ['offer-name', 'status'],
'type': 'object'},
'OfferStatusWatchResult': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/OfferStatusChange'},
'type': 'array'},
'error': {'$ref': '#/definitions/Error'},
'watcher-id': {'type': 'string'}},
'required': ['watcher-id',
'changes'],
'type': 'object'},
'OfferStatusWatchResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/OfferStatusWatchResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'RegisterRemoteRelationArg': {'additionalProperties': False,
'properties': {'application-token': {'type': 'string'},
'bakery-version': {'type': 'integer'},
'local-endpoint-name': {'type': 'string'},
'macaroons': {'items': {'$ref': '#/definitions/Macaroon'},
'type': 'array'},
'offer-uuid': {'type': 'string'},
'relation-token': {'type': 'string'},
'remote-endpoint': {'$ref': '#/definitions/RemoteEndpoint'},
'remote-space': {'$ref': '#/definitions/RemoteSpace'},
'source-model-tag': {'type': 'string'}},
'required': ['application-token',
'source-model-tag',
'relation-token',
'remote-endpoint',
'remote-space',
'offer-uuid',
'local-endpoint-name'],
'type': 'object'},
'RegisterRemoteRelationArgs': {'additionalProperties': False,
'properties': {'relations': {'items': {'$ref': '#/definitions/RegisterRemoteRelationArg'},
'type': 'array'}},
'required': ['relations'],
'type': 'object'},
'RegisterRemoteRelationResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'$ref': '#/definitions/RemoteRelationDetails'}},
'type': 'object'},
'RegisterRemoteRelationResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/RegisterRemoteRelationResult'},
'type': 'array'}},
'type': 'object'},
'RelationLifeSuspendedStatusChange': {'additionalProperties': False,
'properties': {'key': {'type': 'string'},
'life': {'type': 'string'},
'suspended': {'type': 'boolean'},
'suspended-reason': {'type': 'string'}},
'required': ['key',
'life',
'suspended',
'suspended-reason'],
'type': 'object'},
'RelationLifeSuspendedStatusWatchResult': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/RelationLifeSuspendedStatusChange'},
'type': 'array'},
'error': {'$ref': '#/definitions/Error'},
'watcher-id': {'type': 'string'}},
'required': ['watcher-id',
'changes'],
'type': 'object'},
'RelationStatusWatchResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/RelationLifeSuspendedStatusWatchResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'RemoteEndpoint': {'additionalProperties': False,
'properties': {'interface': {'type': 'string'},
'limit': {'type': 'integer'},
'name': {'type': 'string'},
'role': {'type': 'string'}},
'required': ['name',
'role',
'interface',
'limit'],
'type': 'object'},
'RemoteEntityArg': {'additionalProperties': False,
'properties': {'bakery-version': {'type': 'integer'},
'macaroons': {'items': {'$ref': '#/definitions/Macaroon'},
'type': 'array'},
'relation-token': {'type': 'string'}},
'required': ['relation-token'],
'type': 'object'},
'RemoteEntityArgs': {'additionalProperties': False,
'properties': {'args': {'items': {'$ref': '#/definitions/RemoteEntityArg'},
'type': 'array'}},
'required': ['args'],
'type': 'object'},
'RemoteRelationChangeEvent': {'additionalProperties': False,
'properties': {'application-settings': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'application-token': {'type': 'string'},
'bakery-version': {'type': 'integer'},
'changed-units': {'items': {'$ref': '#/definitions/RemoteRelationUnitChange'},
'type': 'array'},
'departed-units': {'items': {'type': 'integer'},
'type': 'array'},
'force-cleanup': {'type': 'boolean'},
'life': {'type': 'string'},
'macaroons': {'items': {'$ref': '#/definitions/Macaroon'},
'type': 'array'},
'relation-token': {'type': 'string'},
'suspended': {'type': 'boolean'},
'suspended-reason': {'type': 'string'}},
'required': ['relation-token',
'application-token',
'life'],
'type': 'object'},
'RemoteRelationDetails': {'additionalProperties': False,
'properties': {'bakery-version': {'type': 'integer'},
'macaroon': {'$ref': '#/definitions/Macaroon'},
'relation-token': {'type': 'string'}},
'required': ['relation-token'],
'type': 'object'},
'RemoteRelationUnitChange': {'additionalProperties': False,
'properties': {'settings': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'unit-id': {'type': 'integer'}},
'required': ['unit-id'],
'type': 'object'},
'RemoteRelationWatchResult': {'additionalProperties': False,
'properties': {'changes': {'$ref': '#/definitions/RemoteRelationChangeEvent'},
'error': {'$ref': '#/definitions/Error'},
'watcher-id': {'type': 'string'}},
'required': ['watcher-id',
'changes'],
'type': 'object'},
'RemoteRelationWatchResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/RemoteRelationWatchResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'RemoteRelationsChanges': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/RemoteRelationChangeEvent'},
'type': 'array'}},
'type': 'object'},
'RemoteSpace': {'additionalProperties': False,
'properties': {'cloud-type': {'type': 'string'},
'name': {'type': 'string'},
'provider-attributes': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'provider-id': {'type': 'string'},
'subnets': {'items': {'$ref': '#/definitions/Subnet'},
'type': 'array'}},
'required': ['cloud-type',
'name',
'provider-id',
'provider-attributes',
'subnets'],
'type': 'object'},
'StringsWatchResult': {'additionalProperties': False,
'properties': {'changes': {'items': {'type': 'string'},
'type': 'array'},
'error': {'$ref': '#/definitions/Error'},
'watcher-id': {'type': 'string'}},
'required': ['watcher-id'],
'type': 'object'},
'StringsWatchResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/StringsWatchResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'Subnet': {'additionalProperties': False,
'properties': {'cidr': {'type': 'string'},
'life': {'type': 'string'},
'provider-id': {'type': 'string'},
'provider-network-id': {'type': 'string'},
'provider-space-id': {'type': 'string'},
'space-tag': {'type': 'string'},
'status': {'type': 'string'},
'vlan-tag': {'type': 'integer'},
'zones': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['cidr',
'vlan-tag',
'life',
'space-tag',
'zones'],
'type': 'object'}},
'properties': {'PublishIngressNetworkChanges': {'properties': {'Params': {'$ref': '#/definitions/IngressNetworksChanges'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'PublishRelationChanges': {'properties': {'Params': {'$ref': '#/definitions/RemoteRelationsChanges'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'RegisterRemoteRelations': {'properties': {'Params': {'$ref': '#/definitions/RegisterRemoteRelationArgs'},
'Result': {'$ref': '#/definitions/RegisterRemoteRelationResults'}},
'type': 'object'},
'WatchEgressAddressesForRelations': {'properties': {'Params': {'$ref': '#/definitions/RemoteEntityArgs'},
'Result': {'$ref': '#/definitions/StringsWatchResults'}},
'type': 'object'},
'WatchOfferStatus': {'properties': {'Params': {'$ref': '#/definitions/OfferArgs'},
'Result': {'$ref': '#/definitions/OfferStatusWatchResults'}},
'type': 'object'},
'WatchRelationChanges': {'properties': {'Params': {'$ref': '#/definitions/RemoteEntityArgs'},
'Result': {'$ref': '#/definitions/RemoteRelationWatchResults'}},
'type': 'object'},
'WatchRelationsSuspendedStatus': {'properties': {'Params': {'$ref': '#/definitions/RemoteEntityArgs'},
'Result': {'$ref': '#/definitions/RelationStatusWatchResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResults)
async def PublishIngressNetworkChanges(self, changes=None):
'''
changes : typing.Sequence[~IngressNetworksChangeEvent]
Returns -> typing.Sequence[~ErrorResult]
'''
if changes is not None and not isinstance(changes, (bytes, str, list)):
raise Exception("Expected changes to be a Sequence, received: {}".format(type(changes)))
# map input types to rpc msg
_params = dict()
msg = dict(type='CrossModelRelations',
request='PublishIngressNetworkChanges',
version=2,
params=_params)
_params['changes'] = changes
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def PublishRelationChanges(self, changes=None):
'''
changes : typing.Sequence[~RemoteRelationChangeEvent]
Returns -> typing.Sequence[~ErrorResult]
'''
if changes is not None and not isinstance(changes, (bytes, str, list)):
raise Exception("Expected changes to be a Sequence, received: {}".format(type(changes)))
# map input types to rpc msg
_params = dict()
msg = dict(type='CrossModelRelations',
request='PublishRelationChanges',
version=2,
params=_params)
_params['changes'] = changes
reply = await self.rpc(msg)
return reply
@ReturnMapping(RegisterRemoteRelationResults)
async def RegisterRemoteRelations(self, relations=None):
'''
relations : typing.Sequence[~RegisterRemoteRelationArg]
Returns -> typing.Sequence[~RegisterRemoteRelationResult]
'''
if relations is not None and not isinstance(relations, (bytes, str, list)):
raise Exception("Expected relations to be a Sequence, received: {}".format(type(relations)))
# map input types to rpc msg
_params = dict()
msg = dict(type='CrossModelRelations',
request='RegisterRemoteRelations',
version=2,
params=_params)
_params['relations'] = relations
reply = await self.rpc(msg)
return reply
@ReturnMapping(StringsWatchResults)
async def WatchEgressAddressesForRelations(self, args=None):
'''
args : typing.Sequence[~RemoteEntityArg]
Returns -> typing.Sequence[~StringsWatchResult]
'''
if args is not None and not isinstance(args, (bytes, str, list)):
raise Exception("Expected args to be a Sequence, received: {}".format(type(args)))
# map input types to rpc msg
_params = dict()
msg = dict(type='CrossModelRelations',
request='WatchEgressAddressesForRelations',
version=2,
params=_params)
_params['args'] = args
reply = await self.rpc(msg)
return reply
@ReturnMapping(OfferStatusWatchResults)
async def WatchOfferStatus(self, args=None):
'''
args : typing.Sequence[~OfferArg]
Returns -> typing.Sequence[~OfferStatusWatchResult]
'''
if args is not None and not isinstance(args, (bytes, str, list)):
raise Exception("Expected args to be a Sequence, received: {}".format(type(args)))
# map input types to rpc msg
_params = dict()
msg = dict(type='CrossModelRelations',
request='WatchOfferStatus',
version=2,
params=_params)
_params['args'] = args
reply = await self.rpc(msg)
return reply
@ReturnMapping(RemoteRelationWatchResults)
async def WatchRelationChanges(self, args=None):
'''
args : typing.Sequence[~RemoteEntityArg]
Returns -> typing.Sequence[~RemoteRelationWatchResult]
'''
if args is not None and not isinstance(args, (bytes, str, list)):
raise Exception("Expected args to be a Sequence, received: {}".format(type(args)))
# map input types to rpc msg
_params = dict()
msg = dict(type='CrossModelRelations',
request='WatchRelationChanges',
version=2,
params=_params)
_params['args'] = args
reply = await self.rpc(msg)
return reply
@ReturnMapping(RelationStatusWatchResults)
async def WatchRelationsSuspendedStatus(self, args=None):
'''
args : typing.Sequence[~RemoteEntityArg]
Returns -> typing.Sequence[~RelationLifeSuspendedStatusWatchResult]
'''
if args is not None and not isinstance(args, (bytes, str, list)):
raise Exception("Expected args to be a Sequence, received: {}".format(type(args)))
# map input types to rpc msg
_params = dict()
msg = dict(type='CrossModelRelations',
request='WatchRelationsSuspendedStatus',
version=2,
params=_params)
_params['args'] = args
reply = await self.rpc(msg)
return reply
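
# Hand-written sketch, not generated: watching offer status across models.
# Assumes a logged-in `connection`; the offer UUID is a placeholder, and a
# plain dict standing in for an OfferArg is an assumption made for brevity
# (the generated params classes can be used instead).
async def _example_watch_offer_status(connection):
    cmr = CrossModelRelationsFacade.from_connection(connection)
    # Each OfferArg only requires 'offer-uuid'; macaroons are optional.
    results = await cmr.WatchOfferStatus(args=[{'offer-uuid': '<offer-uuid>'}])
    return results
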
class DiscoverSpacesFacade(Type):
name = 'DiscoverSpaces'
version = 2
schema = {'definitions': {'AddSubnetParams': {'additionalProperties': False,
'properties': {'provider-network-id': {'type': 'string'},
'space-tag': {'type': 'string'},
'subnet-provider-id': {'type': 'string'},
'subnet-tag': {'type': 'string'},
'vlan-tag': {'type': 'integer'},
'zones': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['space-tag'],
'type': 'object'},
'AddSubnetsParams': {'additionalProperties': False,
'properties': {'subnets': {'items': {'$ref': '#/definitions/AddSubnetParams'},
'type': 'array'}},
'required': ['subnets'],
'type': 'object'},
'CreateSpaceParams': {'additionalProperties': False,
'properties': {'provider-id': {'type': 'string'},
'public': {'type': 'boolean'},
'space-tag': {'type': 'string'},
'subnet-tags': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['subnet-tags',
'space-tag',
'public'],
'type': 'object'},
'CreateSpacesParams': {'additionalProperties': False,
'properties': {'spaces': {'items': {'$ref': '#/definitions/CreateSpaceParams'},
'type': 'array'}},
'required': ['spaces'],
'type': 'object'},
'DiscoverSpacesResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ProviderSpace'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'$ref': '#/definitions/ErrorInfo'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorInfo': {'additionalProperties': False,
'properties': {'macaroon': {'$ref': '#/definitions/Macaroon'},
'macaroon-path': {'type': 'string'}},
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ListSubnetsResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/Subnet'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'Macaroon': {'additionalProperties': False, 'type': 'object'},
'ModelConfigResult': {'additionalProperties': False,
'properties': {'config': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'}},
'required': ['config'],
'type': 'object'},
'ProviderSpace': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'name': {'type': 'string'},
'provider-id': {'type': 'string'},
'subnets': {'items': {'$ref': '#/definitions/Subnet'},
'type': 'array'}},
'required': ['name',
'provider-id',
'subnets'],
'type': 'object'},
'Subnet': {'additionalProperties': False,
'properties': {'cidr': {'type': 'string'},
'life': {'type': 'string'},
'provider-id': {'type': 'string'},
'provider-network-id': {'type': 'string'},
'provider-space-id': {'type': 'string'},
'space-tag': {'type': 'string'},
'status': {'type': 'string'},
'vlan-tag': {'type': 'integer'},
'zones': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['cidr',
'vlan-tag',
'life',
'space-tag',
'zones'],
'type': 'object'},
'SubnetsFilters': {'additionalProperties': False,
'properties': {'space-tag': {'type': 'string'},
'zone': {'type': 'string'}},
'type': 'object'}},
'properties': {'AddSubnets': {'properties': {'Params': {'$ref': '#/definitions/AddSubnetsParams'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'CreateSpaces': {'properties': {'Params': {'$ref': '#/definitions/CreateSpacesParams'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'ListSpaces': {'properties': {'Result': {'$ref': '#/definitions/DiscoverSpacesResults'}},
'type': 'object'},
'ListSubnets': {'properties': {'Params': {'$ref': '#/definitions/SubnetsFilters'},
'Result': {'$ref': '#/definitions/ListSubnetsResults'}},
'type': 'object'},
'ModelConfig': {'properties': {'Result': {'$ref': '#/definitions/ModelConfigResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResults)
async def AddSubnets(self, subnets=None):
'''
subnets : typing.Sequence[~AddSubnetParams]
Returns -> typing.Sequence[~ErrorResult]
'''
if subnets is not None and not isinstance(subnets, (bytes, str, list)):
raise Exception("Expected subnets to be a Sequence, received: {}".format(type(subnets)))
# map input types to rpc msg
_params = dict()
msg = dict(type='DiscoverSpaces',
request='AddSubnets',
version=2,
params=_params)
_params['subnets'] = subnets
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def CreateSpaces(self, spaces=None):
'''
spaces : typing.Sequence[~CreateSpaceParams]
Returns -> typing.Sequence[~ErrorResult]
'''
if spaces is not None and not isinstance(spaces, (bytes, str, list)):
raise Exception("Expected spaces to be a Sequence, received: {}".format(type(spaces)))
# map input types to rpc msg
_params = dict()
msg = dict(type='DiscoverSpaces',
request='CreateSpaces',
version=2,
params=_params)
_params['spaces'] = spaces
reply = await self.rpc(msg)
return reply
@ReturnMapping(DiscoverSpacesResults)
async def ListSpaces(self):
'''
Returns -> typing.Sequence[~ProviderSpace]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='DiscoverSpaces',
request='ListSpaces',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(ListSubnetsResults)
async def ListSubnets(self, space_tag=None, zone=None):
'''
space_tag : str
zone : str
Returns -> typing.Sequence[~Subnet]
'''
if space_tag is not None and not isinstance(space_tag, (bytes, str)):
raise Exception("Expected space_tag to be a str, received: {}".format(type(space_tag)))
if zone is not None and not isinstance(zone, (bytes, str)):
raise Exception("Expected zone to be a str, received: {}".format(type(zone)))
# map input types to rpc msg
_params = dict()
msg = dict(type='DiscoverSpaces',
request='ListSubnets',
version=2,
params=_params)
_params['space-tag'] = space_tag
_params['zone'] = zone
reply = await self.rpc(msg)
return reply
@ReturnMapping(ModelConfigResult)
async def ModelConfig(self):
'''
Returns -> typing.Mapping[str, typing.Any]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='DiscoverSpaces',
request='ModelConfig',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
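
# Hand-written sketch, not generated: enumerating provider spaces and the
# subnets inside one of them. Assumes a logged-in `connection`; the space
# tag below is a placeholder following Juju's "space-<name>" tag form.
async def _example_discover_spaces(connection):
    spaces = DiscoverSpacesFacade.from_connection(connection)
    listed = await spaces.ListSpaces()
    subnets = await spaces.ListSubnets(space_tag='space-default', zone=None)
    return listed, subnets
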
class DiskManagerFacade(Type):
name = 'DiskManager'
version = 2
schema = {'definitions': {'BlockDevice': {'additionalProperties': False,
'properties': {'BusAddress': {'type': 'string'},
'DeviceLinks': {'items': {'type': 'string'},
'type': 'array'},
'DeviceName': {'type': 'string'},
'FilesystemType': {'type': 'string'},
'HardwareId': {'type': 'string'},
'InUse': {'type': 'boolean'},
'Label': {'type': 'string'},
'MountPoint': {'type': 'string'},
'SerialId': {'type': 'string'},
'Size': {'type': 'integer'},
'UUID': {'type': 'string'},
'WWN': {'type': 'string'}},
'required': ['DeviceName',
'DeviceLinks',
'Label',
'UUID',
'HardwareId',
'WWN',
'BusAddress',
'Size',
'FilesystemType',
'InUse',
'MountPoint',
'SerialId'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'MachineBlockDevices': {'additionalProperties': False,
'properties': {'block-devices': {'items': {'$ref': '#/definitions/BlockDevice'},
'type': 'array'},
'machine': {'type': 'string'}},
'required': ['machine'],
'type': 'object'},
'SetMachineBlockDevices': {'additionalProperties': False,
'properties': {'machine-block-devices': {'items': {'$ref': '#/definitions/MachineBlockDevices'},
'type': 'array'}},
'required': ['machine-block-devices'],
'type': 'object'}},
'properties': {'SetMachineBlockDevices': {'properties': {'Params': {'$ref': '#/definitions/SetMachineBlockDevices'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResults)
async def SetMachineBlockDevices(self, machine_block_devices=None):
'''
machine_block_devices : typing.Sequence[~MachineBlockDevices]
Returns -> typing.Sequence[~ErrorResult]
'''
if machine_block_devices is not None and not isinstance(machine_block_devices, (bytes, str, list)):
raise Exception("Expected machine_block_devices to be a Sequence, received: {}".format(type(machine_block_devices)))
# map input types to rpc msg
_params = dict()
msg = dict(type='DiskManager',
request='SetMachineBlockDevices',
version=2,
params=_params)
_params['machine-block-devices'] = machine_block_devices
reply = await self.rpc(msg)
return reply
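
# Hand-written sketch, not generated: reporting block devices observed on a
# machine. Assumes a logged-in `connection`. Plain dicts matching the
# MachineBlockDevices schema are used for brevity; note the BlockDevice
# schema above marks many more fields as required than are shown here.
async def _example_set_block_devices(connection):
    dm = DiskManagerFacade.from_connection(connection)
    devices = [{'machine': 'machine-0',
                'block-devices': [{'DeviceName': 'sdb', 'Size': 10240}]}]
    return await dm.SetMachineBlockDevices(machine_block_devices=devices)
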
class EntityWatcherFacade(Type):
name = 'EntityWatcher'
version = 2
schema = {'definitions': {'EntitiesWatchResult': {'additionalProperties': False,
'properties': {'changes': {'items': {'type': 'string'},
'type': 'array'},
'error': {'$ref': '#/definitions/Error'},
'watcher-id': {'type': 'string'}},
'required': ['watcher-id'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'}},
'properties': {'Next': {'properties': {'Result': {'$ref': '#/definitions/EntitiesWatchResult'}},
'type': 'object'},
'Stop': {'type': 'object'}},
'type': 'object'}
@ReturnMapping(EntitiesWatchResult)
async def Next(self):
'''
Returns -> typing.Union[typing.Sequence[str], _ForwardRef('Error'), str]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='EntityWatcher',
request='Next',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Stop(self):
'''
Returns -> None
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='EntityWatcher',
request='Stop',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
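
# Note (illustrative): the watcher facades share a Next/Stop protocol --
# ``Next`` blocks until the watched collection changes and ``Stop`` ends the
# watch.  The concrete server-side watcher is addressed by a watcher id
# (returned by an earlier Watch* call) that the connection layer attaches to
# the request; the generated methods above carry only the request name and
# params.
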
class FilesystemAttachmentsWatcherFacade(Type):
name = 'FilesystemAttachmentsWatcher'
version = 2
schema = {'definitions': {'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'MachineStorageId': {'additionalProperties': False,
'properties': {'attachment-tag': {'type': 'string'},
'machine-tag': {'type': 'string'}},
'required': ['machine-tag',
'attachment-tag'],
'type': 'object'},
'MachineStorageIdsWatchResult': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/MachineStorageId'},
'type': 'array'},
'error': {'$ref': '#/definitions/Error'},
'watcher-id': {'type': 'string'}},
'required': ['watcher-id',
'changes'],
'type': 'object'}},
'properties': {'Next': {'properties': {'Result': {'$ref': '#/definitions/MachineStorageIdsWatchResult'}},
'type': 'object'},
'Stop': {'type': 'object'}},
'type': 'object'}
@ReturnMapping(MachineStorageIdsWatchResult)
async def Next(self):
'''
Returns -> typing.Union[typing.Sequence[~MachineStorageId], _ForwardRef('Error'), str]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='FilesystemAttachmentsWatcher',
request='Next',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Stop(self):
'''
Returns -> None
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='FilesystemAttachmentsWatcher',
request='Stop',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
class HighAvailabilityFacade(Type):
name = 'HighAvailability'
version = 2
schema = {'definitions': {'ControllersChangeResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'$ref': '#/definitions/ControllersChanges'}},
'required': ['result'],
'type': 'object'},
'ControllersChangeResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ControllersChangeResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ControllersChanges': {'additionalProperties': False,
'properties': {'added': {'items': {'type': 'string'},
'type': 'array'},
'converted': {'items': {'type': 'string'},
'type': 'array'},
'maintained': {'items': {'type': 'string'},
'type': 'array'},
'removed': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'},
'ControllersSpec': {'additionalProperties': False,
'properties': {'constraints': {'$ref': '#/definitions/Value'},
'num-controllers': {'type': 'integer'},
'placement': {'items': {'type': 'string'},
'type': 'array'},
'series': {'type': 'string'}},
'required': ['num-controllers'],
'type': 'object'},
'ControllersSpecs': {'additionalProperties': False,
'properties': {'specs': {'items': {'$ref': '#/definitions/ControllersSpec'},
'type': 'array'}},
'required': ['specs'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'Value': {'additionalProperties': False,
'properties': {'arch': {'type': 'string'},
'container': {'type': 'string'},
'cores': {'type': 'integer'},
'cpu-power': {'type': 'integer'},
'instance-type': {'type': 'string'},
'mem': {'type': 'integer'},
'root-disk': {'type': 'integer'},
'root-disk-source': {'type': 'string'},
'spaces': {'items': {'type': 'string'},
'type': 'array'},
'tags': {'items': {'type': 'string'},
'type': 'array'},
'virt-type': {'type': 'string'},
'zones': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'}},
'properties': {'EnableHA': {'properties': {'Params': {'$ref': '#/definitions/ControllersSpecs'},
'Result': {'$ref': '#/definitions/ControllersChangeResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ControllersChangeResults)
async def EnableHA(self, specs=None):
'''
specs : typing.Sequence[~ControllersSpec]
Returns -> typing.Sequence[~ControllersChangeResult]
'''
if specs is not None and not isinstance(specs, (bytes, str, list)):
raise Exception("Expected specs to be a Sequence, received: {}".format(type(specs)))
# map input types to rpc msg
_params = dict()
msg = dict(type='HighAvailability',
request='EnableHA',
version=2,
params=_params)
_params['specs'] = specs
reply = await self.rpc(msg)
return reply
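
# Usage sketch (illustrative): ask the controller to run in HA with three
# controller machines; ``ControllersSpec`` is the generated params type:
#
#     facade = HighAvailabilityFacade.from_connection(conn)
#     results = await facade.EnableHA(specs=[
#         ControllersSpec(num_controllers=3),
#     ])
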
class ImageManagerFacade(Type):
name = 'ImageManager'
version = 2
schema = {'definitions': {'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ImageFilterParams': {'additionalProperties': False,
'properties': {'images': {'items': {'$ref': '#/definitions/ImageSpec'},
'type': 'array'}},
'required': ['images'],
'type': 'object'},
'ImageMetadata': {'additionalProperties': False,
'properties': {'arch': {'type': 'string'},
'created': {'format': 'date-time',
'type': 'string'},
'kind': {'type': 'string'},
'series': {'type': 'string'},
'url': {'type': 'string'}},
'required': ['kind',
'arch',
'series',
'url',
'created'],
'type': 'object'},
'ImageSpec': {'additionalProperties': False,
'properties': {'arch': {'type': 'string'},
'kind': {'type': 'string'},
'series': {'type': 'string'}},
'required': ['kind', 'arch', 'series'],
'type': 'object'},
'ListImageResult': {'additionalProperties': False,
'properties': {'result': {'items': {'$ref': '#/definitions/ImageMetadata'},
'type': 'array'}},
'required': ['result'],
'type': 'object'}},
'properties': {'DeleteImages': {'properties': {'Params': {'$ref': '#/definitions/ImageFilterParams'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'ListImages': {'properties': {'Params': {'$ref': '#/definitions/ImageFilterParams'},
'Result': {'$ref': '#/definitions/ListImageResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResults)
async def DeleteImages(self, images=None):
'''
images : typing.Sequence[~ImageSpec]
Returns -> typing.Sequence[~ErrorResult]
'''
if images is not None and not isinstance(images, (bytes, str, list)):
raise Exception("Expected images to be a Sequence, received: {}".format(type(images)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ImageManager',
request='DeleteImages',
version=2,
params=_params)
_params['images'] = images
reply = await self.rpc(msg)
return reply
@ReturnMapping(ListImageResult)
async def ListImages(self, images=None):
'''
images : typing.Sequence[~ImageSpec]
Returns -> typing.Sequence[~ImageMetadata]
'''
if images is not None and not isinstance(images, (bytes, str, list)):
raise Exception("Expected images to be a Sequence, received: {}".format(type(images)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ImageManager',
request='ListImages',
version=2,
params=_params)
_params['images'] = images
reply = await self.rpc(msg)
return reply
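
# Usage sketch (illustrative; the kind/arch/series values are made up):
#
#     facade = ImageManagerFacade.from_connection(conn)
#     spec = ImageSpec(kind='lxd', arch='amd64', series='bionic')
#     listed = await facade.ListImages(images=[spec])
#     await facade.DeleteImages(images=[spec])
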
class ImageMetadataFacade(Type):
name = 'ImageMetadata'
version = 2
schema = {'definitions': {'CloudImageMetadata': {'additionalProperties': False,
'properties': {'arch': {'type': 'string'},
'image-id': {'type': 'string'},
'priority': {'type': 'integer'},
'region': {'type': 'string'},
'root-storage-size': {'type': 'integer'},
'root-storage-type': {'type': 'string'},
'series': {'type': 'string'},
'source': {'type': 'string'},
'stream': {'type': 'string'},
'version': {'type': 'string'},
'virt-type': {'type': 'string'}},
'required': ['image-id',
'region',
'version',
'series',
'arch',
'source',
'priority'],
'type': 'object'},
'CloudImageMetadataList': {'additionalProperties': False,
'properties': {'metadata': {'items': {'$ref': '#/definitions/CloudImageMetadata'},
'type': 'array'}},
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'$ref': '#/definitions/ErrorInfo'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorInfo': {'additionalProperties': False,
'properties': {'macaroon': {'$ref': '#/definitions/Macaroon'},
'macaroon-path': {'type': 'string'}},
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ImageMetadataFilter': {'additionalProperties': False,
'properties': {'arches': {'items': {'type': 'string'},
'type': 'array'},
'region': {'type': 'string'},
'root-storage-type': {'type': 'string'},
'series': {'items': {'type': 'string'},
'type': 'array'},
'stream': {'type': 'string'},
'virt-type': {'type': 'string'}},
'type': 'object'},
'ListCloudImageMetadataResult': {'additionalProperties': False,
'properties': {'result': {'items': {'$ref': '#/definitions/CloudImageMetadata'},
'type': 'array'}},
'required': ['result'],
'type': 'object'},
'Macaroon': {'additionalProperties': False, 'type': 'object'},
'MetadataImageIds': {'additionalProperties': False,
'properties': {'image-ids': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['image-ids'],
'type': 'object'},
'MetadataSaveParams': {'additionalProperties': False,
'properties': {'metadata': {'items': {'$ref': '#/definitions/CloudImageMetadataList'},
'type': 'array'}},
'type': 'object'}},
'properties': {'Delete': {'properties': {'Params': {'$ref': '#/definitions/MetadataImageIds'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'List': {'properties': {'Params': {'$ref': '#/definitions/ImageMetadataFilter'},
'Result': {'$ref': '#/definitions/ListCloudImageMetadataResult'}},
'type': 'object'},
'Save': {'properties': {'Params': {'$ref': '#/definitions/MetadataSaveParams'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'UpdateFromPublishedImages': {'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResults)
async def Delete(self, image_ids=None):
'''
image_ids : typing.Sequence[str]
Returns -> typing.Sequence[~ErrorResult]
'''
if image_ids is not None and not isinstance(image_ids, (bytes, str, list)):
raise Exception("Expected image_ids to be a Sequence, received: {}".format(type(image_ids)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ImageMetadata',
request='Delete',
version=2,
params=_params)
_params['image-ids'] = image_ids
reply = await self.rpc(msg)
return reply
@ReturnMapping(ListCloudImageMetadataResult)
async def List(self, arches=None, region=None, root_storage_type=None, series=None, stream=None, virt_type=None):
'''
arches : typing.Sequence[str]
region : str
root_storage_type : str
series : typing.Sequence[str]
stream : str
virt_type : str
Returns -> typing.Sequence[~CloudImageMetadata]
'''
if arches is not None and not isinstance(arches, (bytes, str, list)):
raise Exception("Expected arches to be a Sequence, received: {}".format(type(arches)))
if region is not None and not isinstance(region, (bytes, str)):
raise Exception("Expected region to be a str, received: {}".format(type(region)))
if root_storage_type is not None and not isinstance(root_storage_type, (bytes, str)):
raise Exception("Expected root_storage_type to be a str, received: {}".format(type(root_storage_type)))
if series is not None and not isinstance(series, (bytes, str, list)):
raise Exception("Expected series to be a Sequence, received: {}".format(type(series)))
if stream is not None and not isinstance(stream, (bytes, str)):
raise Exception("Expected stream to be a str, received: {}".format(type(stream)))
if virt_type is not None and not isinstance(virt_type, (bytes, str)):
raise Exception("Expected virt_type to be a str, received: {}".format(type(virt_type)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ImageMetadata',
request='List',
version=2,
params=_params)
_params['arches'] = arches
_params['region'] = region
_params['root-storage-type'] = root_storage_type
_params['series'] = series
_params['stream'] = stream
_params['virt-type'] = virt_type
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def Save(self, metadata=None):
'''
metadata : typing.Sequence[~CloudImageMetadataList]
Returns -> typing.Sequence[~ErrorResult]
'''
if metadata is not None and not isinstance(metadata, (bytes, str, list)):
raise Exception("Expected metadata to be a Sequence, received: {}".format(type(metadata)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ImageMetadata',
request='Save',
version=2,
params=_params)
_params['metadata'] = metadata
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def UpdateFromPublishedImages(self):
'''
Returns -> None
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='ImageMetadata',
request='UpdateFromPublishedImages',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
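
# Usage sketch (illustrative): filter stored cloud image metadata, then ask
# the controller to refresh it from published image streams:
#
#     facade = ImageMetadataFacade.from_connection(conn)
#     found = await facade.List(arches=['amd64'], series=['bionic'])
#     await facade.UpdateFromPublishedImages()
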
class InstanceMutaterFacade(Type):
name = 'InstanceMutater'
version = 2
schema = {'definitions': {'CharmLXDProfile': {'additionalProperties': False,
'properties': {'config': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'description': {'type': 'string'},
'devices': {'patternProperties': {'.*': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'}},
'type': 'object'}},
'required': ['config',
'description',
'devices'],
'type': 'object'},
'CharmProfilingInfoResult': {'additionalProperties': False,
'properties': {'current-profiles': {'items': {'type': 'string'},
'type': 'array'},
'error': {'$ref': '#/definitions/Error'},
'instance-id': {'type': 'string'},
'model-name': {'type': 'string'},
'profile-changes': {'items': {'$ref': '#/definitions/ProfileInfoResult'},
'type': 'array'}},
'required': ['instance-id',
'model-name',
'profile-changes',
'current-profiles',
'error'],
'type': 'object'},
'ContainerTypeResult': {'additionalProperties': False,
'properties': {'container-type': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'required': ['container-type',
'error'],
'type': 'object'},
'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'EntityStatusArgs': {'additionalProperties': False,
'properties': {'data': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'info': {'type': 'string'},
'status': {'type': 'string'},
'tag': {'type': 'string'}},
'required': ['tag',
'status',
'info',
'data'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'LifeResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'life': {'type': 'string'}},
'required': ['life'],
'type': 'object'},
'LifeResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/LifeResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'NotifyWatchResult': {'additionalProperties': False,
'properties': {'NotifyWatcherId': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'required': ['NotifyWatcherId'],
'type': 'object'},
'NotifyWatchResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/NotifyWatchResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ProfileInfoResult': {'additionalProperties': False,
'properties': {'application-name': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'},
'profile': {'$ref': '#/definitions/CharmLXDProfile'},
'revision': {'type': 'integer'}},
'type': 'object'},
'SetProfileArg': {'additionalProperties': False,
'properties': {'entity': {'$ref': '#/definitions/Entity'},
'profiles': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['entity', 'profiles'],
'type': 'object'},
'SetProfileArgs': {'additionalProperties': False,
'properties': {'args': {'items': {'$ref': '#/definitions/SetProfileArg'},
'type': 'array'}},
'required': ['args'],
'type': 'object'},
'SetStatus': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/EntityStatusArgs'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'StringsWatchResult': {'additionalProperties': False,
'properties': {'changes': {'items': {'type': 'string'},
'type': 'array'},
'error': {'$ref': '#/definitions/Error'},
'watcher-id': {'type': 'string'}},
'required': ['watcher-id'],
'type': 'object'}},
'properties': {'CharmProfilingInfo': {'properties': {'Params': {'$ref': '#/definitions/Entity'},
'Result': {'$ref': '#/definitions/CharmProfilingInfoResult'}},
'type': 'object'},
'ContainerType': {'properties': {'Params': {'$ref': '#/definitions/Entity'},
'Result': {'$ref': '#/definitions/ContainerTypeResult'}},
'type': 'object'},
'Life': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/LifeResults'}},
'type': 'object'},
'SetCharmProfiles': {'properties': {'Params': {'$ref': '#/definitions/SetProfileArgs'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'SetModificationStatus': {'properties': {'Params': {'$ref': '#/definitions/SetStatus'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'WatchContainers': {'properties': {'Params': {'$ref': '#/definitions/Entity'},
'Result': {'$ref': '#/definitions/StringsWatchResult'}},
'type': 'object'},
'WatchLXDProfileVerificationNeeded': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/NotifyWatchResults'}},
'type': 'object'},
'WatchMachines': {'properties': {'Result': {'$ref': '#/definitions/StringsWatchResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(CharmProfilingInfoResult)
async def CharmProfilingInfo(self, tag=None):
'''
tag : str
Returns -> typing.Union[typing.Sequence[str], _ForwardRef('Error'), str, typing.Sequence[~ProfileInfoResult]]
'''
if tag is not None and not isinstance(tag, (bytes, str)):
raise Exception("Expected tag to be a str, received: {}".format(type(tag)))
# map input types to rpc msg
_params = dict()
msg = dict(type='InstanceMutater',
request='CharmProfilingInfo',
version=2,
params=_params)
_params['tag'] = tag
reply = await self.rpc(msg)
return reply
@ReturnMapping(ContainerTypeResult)
async def ContainerType(self, tag=None):
'''
tag : str
Returns -> typing.Union[str, _ForwardRef('Error')]
'''
if tag is not None and not isinstance(tag, (bytes, str)):
raise Exception("Expected tag to be a str, received: {}".format(type(tag)))
# map input types to rpc msg
_params = dict()
msg = dict(type='InstanceMutater',
request='ContainerType',
version=2,
params=_params)
_params['tag'] = tag
reply = await self.rpc(msg)
return reply
@ReturnMapping(LifeResults)
async def Life(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~LifeResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='InstanceMutater',
request='Life',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def SetCharmProfiles(self, args=None):
'''
args : typing.Sequence[~SetProfileArg]
Returns -> typing.Sequence[~ErrorResult]
'''
if args is not None and not isinstance(args, (bytes, str, list)):
raise Exception("Expected args to be a Sequence, received: {}".format(type(args)))
# map input types to rpc msg
_params = dict()
msg = dict(type='InstanceMutater',
request='SetCharmProfiles',
version=2,
params=_params)
_params['args'] = args
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def SetModificationStatus(self, entities=None):
'''
entities : typing.Sequence[~EntityStatusArgs]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='InstanceMutater',
request='SetModificationStatus',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(StringsWatchResult)
async def WatchContainers(self, tag=None):
'''
tag : str
Returns -> typing.Union[typing.Sequence[str], _ForwardRef('Error'), str]
'''
if tag is not None and not isinstance(tag, (bytes, str)):
raise Exception("Expected tag to be a str, received: {}".format(type(tag)))
# map input types to rpc msg
_params = dict()
msg = dict(type='InstanceMutater',
request='WatchContainers',
version=2,
params=_params)
_params['tag'] = tag
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResults)
async def WatchLXDProfileVerificationNeeded(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~NotifyWatchResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='InstanceMutater',
request='WatchLXDProfileVerificationNeeded',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(StringsWatchResult)
async def WatchMachines(self):
'''
Returns -> typing.Union[typing.Sequence[str], _ForwardRef('Error'), str]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='InstanceMutater',
request='WatchMachines',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
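
# Usage sketch (illustrative): watch for machines whose LXD profiles need
# attention, then query profiling info for one of them by machine tag:
#
#     facade = InstanceMutaterFacade.from_connection(conn)
#     watch = await facade.WatchMachines()
#     info = await facade.CharmProfilingInfo(tag='machine-0')
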
class LeadershipServiceFacade(Type):
name = 'LeadershipService'
version = 2
schema = {'definitions': {'ApplicationTag': {'additionalProperties': False,
'properties': {'Name': {'type': 'string'}},
'required': ['Name'],
'type': 'object'},
'ClaimLeadershipBulkParams': {'additionalProperties': False,
'properties': {'params': {'items': {'$ref': '#/definitions/ClaimLeadershipParams'},
'type': 'array'}},
'required': ['params'],
'type': 'object'},
'ClaimLeadershipBulkResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ClaimLeadershipParams': {'additionalProperties': False,
'properties': {'application-tag': {'type': 'string'},
'duration': {'type': 'number'},
'unit-tag': {'type': 'string'}},
'required': ['application-tag',
'unit-tag',
'duration'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'}},
'properties': {'BlockUntilLeadershipReleased': {'properties': {'Params': {'$ref': '#/definitions/ApplicationTag'},
'Result': {'$ref': '#/definitions/ErrorResult'}},
'type': 'object'},
'ClaimLeadership': {'properties': {'Params': {'$ref': '#/definitions/ClaimLeadershipBulkParams'},
'Result': {'$ref': '#/definitions/ClaimLeadershipBulkResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResult)
async def BlockUntilLeadershipReleased(self, name=None):
'''
name : str
Returns -> Error
'''
if name is not None and not isinstance(name, (bytes, str)):
raise Exception("Expected name to be a str, received: {}".format(type(name)))
# map input types to rpc msg
_params = dict()
msg = dict(type='LeadershipService',
request='BlockUntilLeadershipReleased',
version=2,
params=_params)
_params['Name'] = name
reply = await self.rpc(msg)
return reply
@ReturnMapping(ClaimLeadershipBulkResults)
async def ClaimLeadership(self, params=None):
'''
params : typing.Sequence[~ClaimLeadershipParams]
Returns -> typing.Sequence[~ErrorResult]
'''
if params is not None and not isinstance(params, (bytes, str, list)):
raise Exception("Expected params to be a Sequence, received: {}".format(type(params)))
# map input types to rpc msg
_params = dict()
msg = dict(type='LeadershipService',
request='ClaimLeadership',
version=2,
params=_params)
_params['params'] = params
reply = await self.rpc(msg)
return reply
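
# Usage sketch (illustrative): claim application leadership for a unit for
# thirty seconds; ``ClaimLeadershipParams`` is the generated params type and
# the tags follow Juju's ``application-<name>``/``unit-<name>-<n>`` format:
#
#     facade = LeadershipServiceFacade.from_connection(conn)
#     results = await facade.ClaimLeadership(params=[
#         ClaimLeadershipParams(application_tag='application-mysql',
#                               unit_tag='unit-mysql-0',
#                               duration=30),
#     ])
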
class MachineManagerFacade(Type):
name = 'MachineManager'
version = 2
schema = {'definitions': {'AddMachineParams': {'additionalProperties': False,
'properties': {'addresses': {'items': {'$ref': '#/definitions/Address'},
'type': 'array'},
'constraints': {'$ref': '#/definitions/Value'},
'container-type': {'type': 'string'},
'disks': {'items': {'$ref': '#/definitions/Constraints'},
'type': 'array'},
'hardware-characteristics': {'$ref': '#/definitions/HardwareCharacteristics'},
'instance-id': {'type': 'string'},
'jobs': {'items': {'type': 'string'},
'type': 'array'},
'nonce': {'type': 'string'},
'parent-id': {'type': 'string'},
'placement': {'$ref': '#/definitions/Placement'},
'series': {'type': 'string'}},
'required': ['series',
'constraints',
'jobs',
'parent-id',
'container-type',
'instance-id',
'nonce',
'hardware-characteristics',
'addresses'],
'type': 'object'},
'AddMachines': {'additionalProperties': False,
'properties': {'params': {'items': {'$ref': '#/definitions/AddMachineParams'},
'type': 'array'}},
'required': ['params'],
'type': 'object'},
'AddMachinesResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'machine': {'type': 'string'}},
'required': ['machine'],
'type': 'object'},
'AddMachinesResults': {'additionalProperties': False,
'properties': {'machines': {'items': {'$ref': '#/definitions/AddMachinesResult'},
'type': 'array'}},
'required': ['machines'],
'type': 'object'},
'Address': {'additionalProperties': False,
'properties': {'scope': {'type': 'string'},
'space-name': {'type': 'string'},
'type': {'type': 'string'},
'value': {'type': 'string'}},
'required': ['value', 'type', 'scope'],
'type': 'object'},
'Constraints': {'additionalProperties': False,
'properties': {'Count': {'type': 'integer'},
'Pool': {'type': 'string'},
'Size': {'type': 'integer'}},
'required': ['Pool', 'Size', 'Count'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'$ref': '#/definitions/ErrorInfo'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorInfo': {'additionalProperties': False,
'properties': {'macaroon': {'$ref': '#/definitions/Macaroon'},
'macaroon-path': {'type': 'string'}},
'type': 'object'},
'HardwareCharacteristics': {'additionalProperties': False,
'properties': {'arch': {'type': 'string'},
'availability-zone': {'type': 'string'},
'cpu-cores': {'type': 'integer'},
'cpu-power': {'type': 'integer'},
'mem': {'type': 'integer'},
'root-disk': {'type': 'integer'},
'tags': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'},
'InstanceType': {'additionalProperties': False,
'properties': {'arches': {'items': {'type': 'string'},
'type': 'array'},
'cost': {'type': 'integer'},
'cpu-cores': {'type': 'integer'},
'deprecated': {'type': 'boolean'},
'memory': {'type': 'integer'},
'name': {'type': 'string'},
'root-disk': {'type': 'integer'},
'virt-type': {'type': 'string'}},
'required': ['arches', 'cpu-cores', 'memory'],
'type': 'object'},
'InstanceTypesResult': {'additionalProperties': False,
'properties': {'cost-currency': {'type': 'string'},
'cost-divisor': {'type': 'integer'},
'cost-unit': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'},
'instance-types': {'items': {'$ref': '#/definitions/InstanceType'},
'type': 'array'}},
'type': 'object'},
'InstanceTypesResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/InstanceTypesResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'Macaroon': {'additionalProperties': False, 'type': 'object'},
'ModelInstanceTypesConstraint': {'additionalProperties': False,
'properties': {'value': {'$ref': '#/definitions/Value'}},
'type': 'object'},
'ModelInstanceTypesConstraints': {'additionalProperties': False,
'properties': {'constraints': {'items': {'$ref': '#/definitions/ModelInstanceTypesConstraint'},
'type': 'array'}},
'required': ['constraints'],
'type': 'object'},
'Placement': {'additionalProperties': False,
'properties': {'directive': {'type': 'string'},
'scope': {'type': 'string'}},
'required': ['scope', 'directive'],
'type': 'object'},
'Value': {'additionalProperties': False,
'properties': {'arch': {'type': 'string'},
'container': {'type': 'string'},
'cores': {'type': 'integer'},
'cpu-power': {'type': 'integer'},
'instance-type': {'type': 'string'},
'mem': {'type': 'integer'},
'root-disk': {'type': 'integer'},
'spaces': {'items': {'type': 'string'},
'type': 'array'},
'tags': {'items': {'type': 'string'},
'type': 'array'},
'virt-type': {'type': 'string'}},
'type': 'object'}},
'properties': {'AddMachines': {'properties': {'Params': {'$ref': '#/definitions/AddMachines'},
'Result': {'$ref': '#/definitions/AddMachinesResults'}},
'type': 'object'},
'InstanceTypes': {'properties': {'Params': {'$ref': '#/definitions/ModelInstanceTypesConstraints'},
'Result': {'$ref': '#/definitions/InstanceTypesResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(AddMachinesResults)
async def AddMachines(self, params=None):
'''
params : typing.Sequence[~AddMachineParams]
Returns -> typing.Sequence[~AddMachinesResult]
'''
if params is not None and not isinstance(params, (bytes, str, list)):
raise Exception("Expected params to be a Sequence, received: {}".format(type(params)))
# map input types to rpc msg
_params = dict()
msg = dict(type='MachineManager',
request='AddMachines',
version=2,
params=_params)
_params['params'] = params
reply = await self.rpc(msg)
return reply
@ReturnMapping(InstanceTypesResults)
async def InstanceTypes(self, constraints=None):
'''
constraints : typing.Sequence[~ModelInstanceTypesConstraint]
Returns -> typing.Sequence[~InstanceTypesResult]
'''
if constraints is not None and not isinstance(constraints, (bytes, str, list)):
raise Exception("Expected constraints to be a Sequence, received: {}".format(type(constraints)))
# map input types to rpc msg
_params = dict()
msg = dict(type='MachineManager',
request='InstanceTypes',
version=2,
params=_params)
_params['constraints'] = constraints
reply = await self.rpc(msg)
return reply
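
# Usage sketch (illustrative): add a machine, then ask which instance types
# satisfy a constraint; ``AddMachineParams``, ``ModelInstanceTypesConstraint``
# and ``Value`` are the generated params types:
#
#     facade = MachineManagerFacade.from_connection(conn)
#     added = await facade.AddMachines(params=[
#         AddMachineParams(series='bionic', jobs=['JobHostUnits']),
#     ])
#     types = await facade.InstanceTypes(constraints=[
#         ModelInstanceTypesConstraint(value=Value(arch='amd64')),
#     ])
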
class MachinerFacade(Type):
name = 'Machiner'
version = 2
schema = {'definitions': {'APIHostPortsResult': {'additionalProperties': False,
'properties': {'servers': {'items': {'items': {'$ref': '#/definitions/HostPort'},
'type': 'array'},
'type': 'array'}},
'required': ['servers'],
'type': 'object'},
'Address': {'additionalProperties': False,
'properties': {'scope': {'type': 'string'},
'space-id': {'type': 'string'},
'space-name': {'type': 'string'},
'type': {'type': 'string'},
'value': {'type': 'string'}},
'required': ['value', 'type', 'scope'],
'type': 'object'},
'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'EntityStatusArgs': {'additionalProperties': False,
'properties': {'data': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'info': {'type': 'string'},
'status': {'type': 'string'},
'tag': {'type': 'string'}},
'required': ['tag',
'status',
'info',
'data'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'HostPort': {'additionalProperties': False,
'properties': {'Address': {'$ref': '#/definitions/Address'},
'port': {'type': 'integer'},
'scope': {'type': 'string'},
'space-id': {'type': 'string'},
'space-name': {'type': 'string'},
'type': {'type': 'string'},
'value': {'type': 'string'}},
'required': ['value',
'type',
'scope',
'Address',
'port'],
'type': 'object'},
'JobsResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'jobs': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['jobs'],
'type': 'object'},
'JobsResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/JobsResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'LifeResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'life': {'type': 'string'}},
'required': ['life'],
'type': 'object'},
'LifeResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/LifeResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'MachineAddresses': {'additionalProperties': False,
'properties': {'addresses': {'items': {'$ref': '#/definitions/Address'},
'type': 'array'},
'tag': {'type': 'string'}},
'required': ['tag', 'addresses'],
'type': 'object'},
'NetworkConfig': {'additionalProperties': False,
'properties': {'address': {'type': 'string'},
'addresses': {'items': {'$ref': '#/definitions/Address'},
'type': 'array'},
'cidr': {'type': 'string'},
'config-type': {'type': 'string'},
'device-index': {'type': 'integer'},
'disabled': {'type': 'boolean'},
'dns-search-domains': {'items': {'type': 'string'},
'type': 'array'},
'dns-servers': {'items': {'type': 'string'},
'type': 'array'},
'gateway-address': {'type': 'string'},
'interface-name': {'type': 'string'},
'interface-type': {'type': 'string'},
'is-default-gateway': {'type': 'boolean'},
'mac-address': {'type': 'string'},
'mtu': {'type': 'integer'},
'no-auto-start': {'type': 'boolean'},
'parent-interface-name': {'type': 'string'},
'provider-address-id': {'type': 'string'},
'provider-id': {'type': 'string'},
'provider-network-id': {'type': 'string'},
'provider-space-id': {'type': 'string'},
'provider-subnet-id': {'type': 'string'},
'provider-vlan-id': {'type': 'string'},
'routes': {'items': {'$ref': '#/definitions/NetworkRoute'},
'type': 'array'},
'shadow-addresses': {'items': {'$ref': '#/definitions/Address'},
'type': 'array'},
'vlan-tag': {'type': 'integer'}},
'required': ['device-index',
'mac-address',
'cidr',
'mtu',
'provider-id',
'provider-network-id',
'provider-subnet-id',
'provider-space-id',
'provider-address-id',
'provider-vlan-id',
'vlan-tag',
'interface-name',
'parent-interface-name',
'interface-type',
'disabled'],
'type': 'object'},
'NetworkRoute': {'additionalProperties': False,
'properties': {'destination-cidr': {'type': 'string'},
'gateway-ip': {'type': 'string'},
'metric': {'type': 'integer'}},
'required': ['destination-cidr',
'gateway-ip',
'metric'],
'type': 'object'},
'NotifyWatchResult': {'additionalProperties': False,
'properties': {'NotifyWatcherId': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'required': ['NotifyWatcherId'],
'type': 'object'},
'NotifyWatchResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/NotifyWatchResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'SetMachineNetworkConfig': {'additionalProperties': False,
'properties': {'config': {'items': {'$ref': '#/definitions/NetworkConfig'},
'type': 'array'},
'tag': {'type': 'string'}},
'required': ['tag', 'config'],
'type': 'object'},
'SetMachinesAddresses': {'additionalProperties': False,
'properties': {'machine-addresses': {'items': {'$ref': '#/definitions/MachineAddresses'},
'type': 'array'}},
'required': ['machine-addresses'],
'type': 'object'},
'SetStatus': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/EntityStatusArgs'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'StringResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'type': 'string'}},
'required': ['result'],
'type': 'object'},
'StringsResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'}},
'properties': {'APIAddresses': {'properties': {'Result': {'$ref': '#/definitions/StringsResult'}},
'type': 'object'},
'APIHostPorts': {'properties': {'Result': {'$ref': '#/definitions/APIHostPortsResult'}},
'type': 'object'},
'EnsureDead': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'Jobs': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/JobsResults'}},
'type': 'object'},
'Life': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/LifeResults'}},
'type': 'object'},
'ModelUUID': {'properties': {'Result': {'$ref': '#/definitions/StringResult'}},
'type': 'object'},
'RecordAgentStartTime': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'SetMachineAddresses': {'properties': {'Params': {'$ref': '#/definitions/SetMachinesAddresses'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'SetObservedNetworkConfig': {'properties': {'Params': {'$ref': '#/definitions/SetMachineNetworkConfig'}},
'type': 'object'},
'SetProviderNetworkConfig': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'SetStatus': {'properties': {'Params': {'$ref': '#/definitions/SetStatus'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'UpdateStatus': {'properties': {'Params': {'$ref': '#/definitions/SetStatus'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'Watch': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/NotifyWatchResults'}},
'type': 'object'},
'WatchAPIHostPorts': {'properties': {'Result': {'$ref': '#/definitions/NotifyWatchResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(StringsResult)
async def APIAddresses(self):
'''
Returns -> typing.Union[_ForwardRef('Error'), typing.Sequence[str]]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='APIAddresses',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(APIHostPortsResult)
async def APIHostPorts(self):
'''
Returns -> typing.Sequence[~HostPort]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='APIHostPorts',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def EnsureDead(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='EnsureDead',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(JobsResults)
async def Jobs(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~JobsResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='Jobs',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(LifeResults)
async def Life(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~LifeResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='Life',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(StringResult)
async def ModelUUID(self):
'''
Returns -> typing.Union[_ForwardRef('Error'), str]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='ModelUUID',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def RecordAgentStartTime(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='RecordAgentStartTime',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def SetMachineAddresses(self, machine_addresses=None):
'''
machine_addresses : typing.Sequence[~MachineAddresses]
Returns -> typing.Sequence[~ErrorResult]
'''
if machine_addresses is not None and not isinstance(machine_addresses, (bytes, str, list)):
raise Exception("Expected machine_addresses to be a Sequence, received: {}".format(type(machine_addresses)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='SetMachineAddresses',
version=2,
params=_params)
_params['machine-addresses'] = machine_addresses
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def SetObservedNetworkConfig(self, config=None, tag=None):
'''
config : typing.Sequence[~NetworkConfig]
tag : str
Returns -> None
'''
if config is not None and not isinstance(config, (bytes, str, list)):
raise Exception("Expected config to be a Sequence, received: {}".format(type(config)))
if tag is not None and not isinstance(tag, (bytes, str)):
raise Exception("Expected tag to be a str, received: {}".format(type(tag)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='SetObservedNetworkConfig',
version=2,
params=_params)
_params['config'] = config
_params['tag'] = tag
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def SetProviderNetworkConfig(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='SetProviderNetworkConfig',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def SetStatus(self, entities=None):
'''
entities : typing.Sequence[~EntityStatusArgs]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='SetStatus',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def UpdateStatus(self, entities=None):
'''
entities : typing.Sequence[~EntityStatusArgs]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='UpdateStatus',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResults)
async def Watch(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~NotifyWatchResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='Watch',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResult)
async def WatchAPIHostPorts(self):
'''
Returns -> typing.Union[str, _ForwardRef('Error')]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Machiner',
request='WatchAPIHostPorts',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
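
# Usage sketch (illustrative): a machine agent checking its own life and
# reporting status; ``Entity`` and ``EntityStatusArgs`` are the generated
# params types:
#
#     facade = MachinerFacade.from_connection(conn)
#     life = await facade.Life(entities=[Entity(tag='machine-0')])
#     await facade.SetStatus(entities=[
#         EntityStatusArgs(tag='machine-0', status='started', info='', data={}),
#     ])
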
class MetricsAdderFacade(Type):
name = 'MetricsAdder'
version = 2
schema = {'definitions': {'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'Metric': {'additionalProperties': False,
'properties': {'key': {'type': 'string'},
'labels': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'time': {'format': 'date-time',
'type': 'string'},
'value': {'type': 'string'}},
'required': ['key', 'value', 'time'],
'type': 'object'},
'MetricBatch': {'additionalProperties': False,
'properties': {'charm-url': {'type': 'string'},
'created': {'format': 'date-time',
'type': 'string'},
'metrics': {'items': {'$ref': '#/definitions/Metric'},
'type': 'array'},
'uuid': {'type': 'string'}},
'required': ['uuid',
'charm-url',
'created',
'metrics'],
'type': 'object'},
'MetricBatchParam': {'additionalProperties': False,
'properties': {'batch': {'$ref': '#/definitions/MetricBatch'},
'tag': {'type': 'string'}},
'required': ['tag', 'batch'],
'type': 'object'},
'MetricBatchParams': {'additionalProperties': False,
'properties': {'batches': {'items': {'$ref': '#/definitions/MetricBatchParam'},
'type': 'array'}},
'required': ['batches'],
'type': 'object'}},
'properties': {'AddMetricBatches': {'properties': {'Params': {'$ref': '#/definitions/MetricBatchParams'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResults)
async def AddMetricBatches(self, batches=None):
'''
batches : typing.Sequence[~MetricBatchParam]
Returns -> typing.Sequence[~ErrorResult]
'''
if batches is not None and not isinstance(batches, (bytes, str, list)):
raise Exception("Expected batches to be a Sequence, received: {}".format(type(batches)))
# map input types to rpc msg
_params = dict()
msg = dict(type='MetricsAdder',
request='AddMetricBatches',
version=2,
params=_params)
_params['batches'] = batches
reply = await self.rpc(msg)
return reply
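
# Usage sketch (illustrative): submit one metric batch for a unit; timestamps
# travel as RFC3339 strings and ``batch_uuid`` stands in for a caller-supplied
# UUID:
#
#     facade = MetricsAdderFacade.from_connection(conn)
#     batch = MetricBatch(uuid=batch_uuid,
#                         charm_url='cs:mysql-58',
#                         created='2020-01-01T00:00:00Z',
#                         metrics=[Metric(key='pings', value='1',
#                                         time='2020-01-01T00:00:00Z')])
#     await facade.AddMetricBatches(batches=[
#         MetricBatchParam(tag='unit-mysql-0', batch=batch),
#     ])
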
class MetricsDebugFacade(Type):
name = 'MetricsDebug'
version = 2
schema = {'definitions': {'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'EntityMetrics': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'metrics': {'items': {'$ref': '#/definitions/MetricResult'},
'type': 'array'}},
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'MeterStatusParam': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'type': 'string'},
'tag': {'type': 'string'}},
'required': ['tag', 'code'],
'type': 'object'},
'MeterStatusParams': {'additionalProperties': False,
'properties': {'statues': {'items': {'$ref': '#/definitions/MeterStatusParam'},
'type': 'array'}},
'required': ['statues'],
'type': 'object'},
'MetricResult': {'additionalProperties': False,
'properties': {'key': {'type': 'string'},
'labels': {'patternProperties': {'.*': {'type': 'string'}},
'type': 'object'},
'time': {'format': 'date-time',
'type': 'string'},
'unit': {'type': 'string'},
'value': {'type': 'string'}},
'required': ['time',
'key',
'value',
'unit',
'labels'],
'type': 'object'},
'MetricResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/EntityMetrics'},
'type': 'array'}},
'required': ['results'],
'type': 'object'}},
'properties': {'GetMetrics': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/MetricResults'}},
'type': 'object'},
'SetMeterStatus': {'properties': {'Params': {'$ref': '#/definitions/MeterStatusParams'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(MetricResults)
async def GetMetrics(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~EntityMetrics]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='MetricsDebug',
request='GetMetrics',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def SetMeterStatus(self, statues=None):
'''
statues : typing.Sequence[~MeterStatusParam]
Returns -> typing.Sequence[~ErrorResult]
'''
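        # NB: 'statues' (sic) is kept verbatim because it mirrors the
        # misspelled field name required by the MeterStatusParams schema above.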
if statues is not None and not isinstance(statues, (bytes, str, list)):
raise Exception("Expected statues to be a Sequence, received: {}".format(type(statues)))
# map input types to rpc msg
_params = dict()
msg = dict(type='MetricsDebug',
request='SetMeterStatus',
version=2,
params=_params)
_params['statues'] = statues
reply = await self.rpc(msg)
return reply
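
# Usage sketch (illustrative): read back collected metrics for a unit and
# force a meter status while debugging; ``MeterStatusParam`` is the generated
# params type and 'RED' one of Juju's meter status codes:
#
#     facade = MetricsDebugFacade.from_connection(conn)
#     metrics = await facade.GetMetrics(entities=[Entity(tag='unit-mysql-0')])
#     await facade.SetMeterStatus(statues=[
#         MeterStatusParam(tag='unit-mysql-0', code='RED', info='debugging'),
#     ])
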
class MigrationMasterFacade(Type):
name = 'MigrationMaster'
version = 2
schema = {'definitions': {'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'MasterMigrationStatus': {'additionalProperties': False,
'properties': {'migration-id': {'type': 'string'},
'phase': {'type': 'string'},
'phase-changed-time': {'format': 'date-time',
'type': 'string'},
'spec': {'$ref': '#/definitions/MigrationSpec'}},
'required': ['spec',
'migration-id',
'phase',
'phase-changed-time'],
'type': 'object'},
'MigrationModelInfo': {'additionalProperties': False,
'properties': {'agent-version': {'$ref': '#/definitions/Number'},
'controller-agent-version': {'$ref': '#/definitions/Number'},
'name': {'type': 'string'},
'owner-tag': {'type': 'string'},
'uuid': {'type': 'string'}},
'required': ['uuid',
'name',
'owner-tag',
'agent-version',
'controller-agent-version'],
'type': 'object'},
'MigrationSpec': {'additionalProperties': False,
'properties': {'model-tag': {'type': 'string'},
'target-info': {'$ref': '#/definitions/MigrationTargetInfo'}},
'required': ['model-tag', 'target-info'],
'type': 'object'},
'MigrationTargetInfo': {'additionalProperties': False,
'properties': {'addrs': {'items': {'type': 'string'},
'type': 'array'},
'auth-tag': {'type': 'string'},
'ca-cert': {'type': 'string'},
'controller-alias': {'type': 'string'},
'controller-tag': {'type': 'string'},
'macaroons': {'type': 'string'},
'password': {'type': 'string'}},
'required': ['controller-tag',
'addrs',
'ca-cert',
'auth-tag'],
'type': 'object'},
'MinionReports': {'additionalProperties': False,
'properties': {'failed': {'items': {'type': 'string'},
'type': 'array'},
'migration-id': {'type': 'string'},
'phase': {'type': 'string'},
'success-count': {'type': 'integer'},
'unknown-count': {'type': 'integer'},
'unknown-sample': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['migration-id',
'phase',
'success-count',
'unknown-count',
'unknown-sample',
'failed'],
'type': 'object'},
'NotifyWatchResult': {'additionalProperties': False,
'properties': {'NotifyWatcherId': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'required': ['NotifyWatcherId'],
'type': 'object'},
'Number': {'additionalProperties': False,
'properties': {'Build': {'type': 'integer'},
'Major': {'type': 'integer'},
'Minor': {'type': 'integer'},
'Patch': {'type': 'integer'},
'Tag': {'type': 'string'}},
'required': ['Major',
'Minor',
'Tag',
'Patch',
'Build'],
'type': 'object'},
'ProcessRelations': {'additionalProperties': False,
'properties': {'controller-alias': {'type': 'string'}},
'required': ['controller-alias'],
'type': 'object'},
'SerializedModel': {'additionalProperties': False,
'properties': {'bytes': {'items': {'type': 'integer'},
'type': 'array'},
'charms': {'items': {'type': 'string'},
'type': 'array'},
'resources': {'items': {'$ref': '#/definitions/SerializedModelResource'},
'type': 'array'},
'tools': {'items': {'$ref': '#/definitions/SerializedModelTools'},
'type': 'array'}},
'required': ['bytes',
'charms',
'tools',
'resources'],
'type': 'object'},
'SerializedModelResource': {'additionalProperties': False,
'properties': {'application': {'type': 'string'},
'application-revision': {'$ref': '#/definitions/SerializedModelResourceRevision'},
'charmstore-revision': {'$ref': '#/definitions/SerializedModelResourceRevision'},
'name': {'type': 'string'},
'unit-revisions': {'patternProperties': {'.*': {'$ref': '#/definitions/SerializedModelResourceRevision'}},
'type': 'object'}},
'required': ['application',
'name',
'application-revision',
'charmstore-revision',
'unit-revisions'],
'type': 'object'},
'SerializedModelResourceRevision': {'additionalProperties': False,
'properties': {'description': {'type': 'string'},
'fingerprint': {'type': 'string'},
'origin': {'type': 'string'},
'path': {'type': 'string'},
'revision': {'type': 'integer'},
'size': {'type': 'integer'},
'timestamp': {'format': 'date-time',
'type': 'string'},
'type': {'type': 'string'},
'username': {'type': 'string'}},
'required': ['revision',
'type',
'path',
'description',
'origin',
'fingerprint',
'size',
'timestamp'],
'type': 'object'},
'SerializedModelTools': {'additionalProperties': False,
'properties': {'uri': {'type': 'string'},
'version': {'type': 'string'}},
'required': ['version', 'uri'],
'type': 'object'},
'SetMigrationPhaseArgs': {'additionalProperties': False,
'properties': {'phase': {'type': 'string'}},
'required': ['phase'],
'type': 'object'},
'SetMigrationStatusMessageArgs': {'additionalProperties': False,
'properties': {'message': {'type': 'string'}},
'required': ['message'],
'type': 'object'}},
'properties': {'Export': {'properties': {'Result': {'$ref': '#/definitions/SerializedModel'}},
'type': 'object'},
'MigrationStatus': {'properties': {'Result': {'$ref': '#/definitions/MasterMigrationStatus'}},
'type': 'object'},
'MinionReports': {'properties': {'Result': {'$ref': '#/definitions/MinionReports'}},
'type': 'object'},
'ModelInfo': {'properties': {'Result': {'$ref': '#/definitions/MigrationModelInfo'}},
'type': 'object'},
'Prechecks': {'type': 'object'},
'ProcessRelations': {'properties': {'Params': {'$ref': '#/definitions/ProcessRelations'}},
'type': 'object'},
'Reap': {'type': 'object'},
'SetPhase': {'properties': {'Params': {'$ref': '#/definitions/SetMigrationPhaseArgs'}},
'type': 'object'},
'SetStatusMessage': {'properties': {'Params': {'$ref': '#/definitions/SetMigrationStatusMessageArgs'}},
'type': 'object'},
'Watch': {'properties': {'Result': {'$ref': '#/definitions/NotifyWatchResult'}},
'type': 'object'},
'WatchMinionReports': {'properties': {'Result': {'$ref': '#/definitions/NotifyWatchResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(SerializedModel)
async def Export(self):
'''
Returns -> typing.Union[typing.Sequence[int], typing.Sequence[str], typing.Sequence[~SerializedModelResource], typing.Sequence[~SerializedModelTools]]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='MigrationMaster',
request='Export',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(MasterMigrationStatus)
async def MigrationStatus(self):
'''
Returns -> typing.Union[str, _ForwardRef('MigrationSpec')]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='MigrationMaster',
request='MigrationStatus',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(MinionReports)
async def MinionReports(self):
'''
Returns -> typing.Union[typing.Sequence[str], str, int]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='MigrationMaster',
request='MinionReports',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(MigrationModelInfo)
async def ModelInfo(self):
'''
Returns -> typing.Union[_ForwardRef('Number'), str]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='MigrationMaster',
request='ModelInfo',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Prechecks(self):
'''
Returns -> None
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='MigrationMaster',
request='Prechecks',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def ProcessRelations(self, controller_alias=None):
'''
controller_alias : str
Returns -> None
'''
if controller_alias is not None and not isinstance(controller_alias, (bytes, str)):
raise Exception("Expected controller_alias to be a str, received: {}".format(type(controller_alias)))
# map input types to rpc msg
_params = dict()
msg = dict(type='MigrationMaster',
request='ProcessRelations',
version=2,
params=_params)
_params['controller-alias'] = controller_alias
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Reap(self):
'''
Returns -> None
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='MigrationMaster',
request='Reap',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def SetPhase(self, phase=None):
'''
phase : str
Returns -> None
'''
if phase is not None and not isinstance(phase, (bytes, str)):
raise Exception("Expected phase to be a str, received: {}".format(type(phase)))
# map input types to rpc msg
_params = dict()
msg = dict(type='MigrationMaster',
request='SetPhase',
version=2,
params=_params)
_params['phase'] = phase
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def SetStatusMessage(self, message=None):
'''
message : str
Returns -> None
'''
if message is not None and not isinstance(message, (bytes, str)):
raise Exception("Expected message to be a str, received: {}".format(type(message)))
# map input types to rpc msg
_params = dict()
msg = dict(type='MigrationMaster',
request='SetStatusMessage',
version=2,
params=_params)
_params['message'] = message
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResult)
async def Watch(self):
'''
Returns -> typing.Union[str, _ForwardRef('Error')]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='MigrationMaster',
request='Watch',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResult)
async def WatchMinionReports(self):
'''
Returns -> typing.Union[str, _ForwardRef('Error')]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='MigrationMaster',
request='WatchMinionReports',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
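

# A minimal usage sketch for MigrationMasterFacade, which drives a model
# migration from the source controller. It assumes an authenticated
# controller websocket (`connection`) and the Type.from_connection() helper
# from juju.client.facade; neither is defined in this generated module, and
# the result attribute names assume the generator's usual dash-to-underscore
# mapping. Errors surface as exceptions from the underlying rpc call.
async def _example_migration_master(connection):
    facade = MigrationMasterFacade.from_connection(connection)
    # Run the pre-migration checks first.
    await facade.Prechecks()
    # Then inspect the active migration, if any; the reply is decoded into a
    # MasterMigrationStatus instance by the ReturnMapping decorator.
    status = await facade.MigrationStatus()
    print(status.migration_id, status.phase, status.phase_changed_time)
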
class ModelConfigFacade(Type):
name = 'ModelConfig'
version = 2
schema = {'definitions': {'ConfigValue': {'additionalProperties': False,
'properties': {'source': {'type': 'string'},
'value': {'additionalProperties': True,
'type': 'object'}},
'required': ['value', 'source'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ModelConfigResults': {'additionalProperties': False,
'properties': {'config': {'patternProperties': {'.*': {'$ref': '#/definitions/ConfigValue'}},
'type': 'object'}},
'required': ['config'],
'type': 'object'},
'ModelSLA': {'additionalProperties': False,
'properties': {'ModelSLAInfo': {'$ref': '#/definitions/ModelSLAInfo'},
'creds': {'items': {'type': 'integer'},
'type': 'array'},
'level': {'type': 'string'},
'owner': {'type': 'string'}},
'required': ['level',
'owner',
'ModelSLAInfo',
'creds'],
'type': 'object'},
'ModelSLAInfo': {'additionalProperties': False,
'properties': {'level': {'type': 'string'},
'owner': {'type': 'string'}},
'required': ['level', 'owner'],
'type': 'object'},
'ModelSequencesResult': {'additionalProperties': False,
'properties': {'sequences': {'patternProperties': {'.*': {'type': 'integer'}},
'type': 'object'}},
'required': ['sequences'],
'type': 'object'},
'ModelSet': {'additionalProperties': False,
'properties': {'config': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'}},
'required': ['config'],
'type': 'object'},
'ModelUnset': {'additionalProperties': False,
'properties': {'keys': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['keys'],
'type': 'object'},
'StringResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'type': 'string'}},
'required': ['result'],
'type': 'object'}},
'properties': {'ModelGet': {'properties': {'Result': {'$ref': '#/definitions/ModelConfigResults'}},
'type': 'object'},
'ModelSet': {'properties': {'Params': {'$ref': '#/definitions/ModelSet'}},
'type': 'object'},
'ModelUnset': {'properties': {'Params': {'$ref': '#/definitions/ModelUnset'}},
'type': 'object'},
'SLALevel': {'properties': {'Result': {'$ref': '#/definitions/StringResult'}},
'type': 'object'},
'Sequences': {'properties': {'Result': {'$ref': '#/definitions/ModelSequencesResult'}},
'type': 'object'},
'SetSLALevel': {'properties': {'Params': {'$ref': '#/definitions/ModelSLA'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ModelConfigResults)
async def ModelGet(self):
'''
Returns -> typing.Mapping[str, ~ConfigValue]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelConfig',
request='ModelGet',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def ModelSet(self, config=None):
'''
config : typing.Mapping[str, typing.Any]
Returns -> None
'''
if config is not None and not isinstance(config, dict):
raise Exception("Expected config to be a Mapping, received: {}".format(type(config)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelConfig',
request='ModelSet',
version=2,
params=_params)
_params['config'] = config
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def ModelUnset(self, keys=None):
'''
keys : typing.Sequence[str]
Returns -> None
'''
if keys is not None and not isinstance(keys, (bytes, str, list)):
raise Exception("Expected keys to be a Sequence, received: {}".format(type(keys)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelConfig',
request='ModelUnset',
version=2,
params=_params)
_params['keys'] = keys
reply = await self.rpc(msg)
return reply
@ReturnMapping(StringResult)
async def SLALevel(self):
'''
Returns -> typing.Union[_ForwardRef('Error'), str]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelConfig',
request='SLALevel',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(ModelSequencesResult)
async def Sequences(self):
'''
Returns -> typing.Mapping[str, int]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelConfig',
request='Sequences',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def SetSLALevel(self, modelslainfo=None, creds=None, level=None, owner=None):
'''
modelslainfo : ModelSLAInfo
creds : typing.Sequence[int]
level : str
owner : str
Returns -> None
'''
if modelslainfo is not None and not isinstance(modelslainfo, (dict, ModelSLAInfo)):
raise Exception("Expected modelslainfo to be a ModelSLAInfo, received: {}".format(type(modelslainfo)))
if creds is not None and not isinstance(creds, (bytes, str, list)):
raise Exception("Expected creds to be a Sequence, received: {}".format(type(creds)))
if level is not None and not isinstance(level, (bytes, str)):
raise Exception("Expected level to be a str, received: {}".format(type(level)))
if owner is not None and not isinstance(owner, (bytes, str)):
raise Exception("Expected owner to be a str, received: {}".format(type(owner)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelConfig',
request='SetSLALevel',
version=2,
params=_params)
_params['ModelSLAInfo'] = modelslainfo
_params['creds'] = creds
_params['level'] = level
_params['owner'] = owner
reply = await self.rpc(msg)
return reply
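

# A minimal usage sketch for ModelConfigFacade, under the same assumptions
# as the sketch above (an established model `connection` plus the
# Type.from_connection() helper, both defined outside this module).
async def _example_model_config(connection):
    facade = ModelConfigFacade.from_connection(connection)
    # ModelGet returns every config key as a ConfigValue carrying the value
    # and the source it came from (default, controller, model, ...).
    results = await facade.ModelGet()
    for key, entry in results.config.items():
        print(key, entry.value, entry.source)
    # ModelSet takes a plain str->Any mapping and merges it into the config.
    await facade.ModelSet(config={'logging-config': '<root>=DEBUG'})
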
class ModelGenerationFacade(Type):
name = 'ModelGeneration'
version = 2
schema = {'definitions': {'BoolResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'type': 'boolean'}},
'required': ['result'],
'type': 'object'},
'BranchArg': {'additionalProperties': False,
'properties': {'branch': {'type': 'string'}},
'required': ['branch'],
'type': 'object'},
'BranchInfoArgs': {'additionalProperties': False,
'properties': {'branches': {'items': {'type': 'string'},
'type': 'array'},
'detailed': {'type': 'boolean'}},
'required': ['branches', 'detailed'],
'type': 'object'},
'BranchTrackArg': {'additionalProperties': False,
'properties': {'branch': {'type': 'string'},
'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['branch', 'entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'Generation': {'additionalProperties': False,
'properties': {'applications': {'items': {'$ref': '#/definitions/GenerationApplication'},
'type': 'array'},
'branch': {'type': 'string'},
'created': {'type': 'integer'},
'created-by': {'type': 'string'}},
'required': ['branch',
'created',
'created-by',
'applications'],
'type': 'object'},
'GenerationApplication': {'additionalProperties': False,
'properties': {'application': {'type': 'string'},
'config': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'pending': {'items': {'type': 'string'},
'type': 'array'},
'progress': {'type': 'string'},
'tracking': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['application',
'progress',
'config'],
'type': 'object'},
'GenerationResults': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'generations': {'items': {'$ref': '#/definitions/Generation'},
'type': 'array'}},
'required': ['generations'],
'type': 'object'},
'IntResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'type': 'integer'}},
'required': ['result'],
'type': 'object'}},
'properties': {'AbortBranch': {'properties': {'Params': {'$ref': '#/definitions/BranchArg'},
'Result': {'$ref': '#/definitions/ErrorResult'}},
'type': 'object'},
'AddBranch': {'properties': {'Params': {'$ref': '#/definitions/BranchArg'},
'Result': {'$ref': '#/definitions/ErrorResult'}},
'type': 'object'},
'BranchInfo': {'properties': {'Params': {'$ref': '#/definitions/BranchInfoArgs'},
'Result': {'$ref': '#/definitions/GenerationResults'}},
'type': 'object'},
'CommitBranch': {'properties': {'Params': {'$ref': '#/definitions/BranchArg'},
'Result': {'$ref': '#/definitions/IntResult'}},
'type': 'object'},
'HasActiveBranch': {'properties': {'Params': {'$ref': '#/definitions/BranchArg'},
'Result': {'$ref': '#/definitions/BoolResult'}},
'type': 'object'},
'TrackBranch': {'properties': {'Params': {'$ref': '#/definitions/BranchTrackArg'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResult)
async def AbortBranch(self, branch=None):
'''
branch : str
Returns -> Error
'''
if branch is not None and not isinstance(branch, (bytes, str)):
raise Exception("Expected branch to be a str, received: {}".format(type(branch)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelGeneration',
request='AbortBranch',
version=2,
params=_params)
_params['branch'] = branch
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResult)
async def AddBranch(self, branch=None):
'''
branch : str
Returns -> Error
'''
if branch is not None and not isinstance(branch, (bytes, str)):
raise Exception("Expected branch to be a str, received: {}".format(type(branch)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelGeneration',
request='AddBranch',
version=2,
params=_params)
_params['branch'] = branch
reply = await self.rpc(msg)
return reply
@ReturnMapping(GenerationResults)
async def BranchInfo(self, branches=None, detailed=None):
'''
branches : typing.Sequence[str]
detailed : bool
Returns -> typing.Union[_ForwardRef('Error'), typing.Sequence[~Generation]]
'''
if branches is not None and not isinstance(branches, (bytes, str, list)):
raise Exception("Expected branches to be a Sequence, received: {}".format(type(branches)))
if detailed is not None and not isinstance(detailed, bool):
raise Exception("Expected detailed to be a bool, received: {}".format(type(detailed)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelGeneration',
request='BranchInfo',
version=2,
params=_params)
_params['branches'] = branches
_params['detailed'] = detailed
reply = await self.rpc(msg)
return reply
@ReturnMapping(IntResult)
async def CommitBranch(self, branch=None):
'''
branch : str
Returns -> typing.Union[_ForwardRef('Error'), int]
'''
if branch is not None and not isinstance(branch, (bytes, str)):
raise Exception("Expected branch to be a str, received: {}".format(type(branch)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelGeneration',
request='CommitBranch',
version=2,
params=_params)
_params['branch'] = branch
reply = await self.rpc(msg)
return reply
@ReturnMapping(BoolResult)
async def HasActiveBranch(self, branch=None):
'''
branch : str
Returns -> typing.Union[_ForwardRef('Error'), bool]
'''
if branch is not None and not isinstance(branch, (bytes, str)):
raise Exception("Expected branch to be a str, received: {}".format(type(branch)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelGeneration',
request='HasActiveBranch',
version=2,
params=_params)
_params['branch'] = branch
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def TrackBranch(self, branch=None, entities=None):
'''
branch : str
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ErrorResult]
'''
if branch is not None and not isinstance(branch, (bytes, str)):
raise Exception("Expected branch to be a str, received: {}".format(type(branch)))
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelGeneration',
request='TrackBranch',
version=2,
params=_params)
_params['branch'] = branch
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
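

# A minimal usage sketch for ModelGenerationFacade (model branches), under
# the same assumptions as the sketches above; the branch name is
# illustrative only.
async def _example_branches(connection):
    facade = ModelGenerationFacade.from_connection(connection)
    await facade.AddBranch(branch='feature-x')
    active = await facade.HasActiveBranch(branch='feature-x')
    if active.result:
        # CommitBranch folds the branch back into the model and returns the
        # resulting generation id in IntResult.result.
        commit = await facade.CommitBranch(branch='feature-x')
        print('committed as generation', commit.result)
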
class ModelManagerFacade(Type):
name = 'ModelManager'
version = 2
schema = {'definitions': {'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'EntityStatus': {'additionalProperties': False,
'properties': {'data': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'info': {'type': 'string'},
'since': {'format': 'date-time',
'type': 'string'},
'status': {'type': 'string'}},
'required': ['status', 'info', 'since'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'$ref': '#/definitions/ErrorInfo'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorInfo': {'additionalProperties': False,
'properties': {'macaroon': {'$ref': '#/definitions/Macaroon'},
'macaroon-path': {'type': 'string'}},
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'Macaroon': {'additionalProperties': False, 'type': 'object'},
'MachineHardware': {'additionalProperties': False,
'properties': {'arch': {'type': 'string'},
'availability-zone': {'type': 'string'},
'cores': {'type': 'integer'},
'cpu-power': {'type': 'integer'},
'mem': {'type': 'integer'},
'root-disk': {'type': 'integer'},
'tags': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'},
'MapResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'}},
'required': ['result'],
'type': 'object'},
'MapResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/MapResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'Model': {'additionalProperties': False,
'properties': {'name': {'type': 'string'},
'owner-tag': {'type': 'string'},
'uuid': {'type': 'string'}},
'required': ['name', 'uuid', 'owner-tag'],
'type': 'object'},
'ModelCreateArgs': {'additionalProperties': False,
'properties': {'cloud-tag': {'type': 'string'},
'config': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'credential': {'type': 'string'},
'name': {'type': 'string'},
'owner-tag': {'type': 'string'},
'region': {'type': 'string'}},
'required': ['name', 'owner-tag'],
'type': 'object'},
'ModelDefaultValues': {'additionalProperties': False,
'properties': {'cloud-region': {'type': 'string'},
'cloud-tag': {'type': 'string'},
'config': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'}},
'required': ['config'],
'type': 'object'},
'ModelDefaults': {'additionalProperties': False,
'properties': {'controller': {'additionalProperties': True,
'type': 'object'},
'default': {'additionalProperties': True,
'type': 'object'},
'regions': {'items': {'$ref': '#/definitions/RegionDefaults'},
'type': 'array'}},
'type': 'object'},
'ModelDefaultsResult': {'additionalProperties': False,
'properties': {'config': {'patternProperties': {'.*': {'$ref': '#/definitions/ModelDefaults'}},
'type': 'object'}},
'required': ['config'],
'type': 'object'},
'ModelInfo': {'additionalProperties': False,
'properties': {'agent-version': {'$ref': '#/definitions/Number'},
'cloud-credential-tag': {'type': 'string'},
'cloud-region': {'type': 'string'},
'cloud-tag': {'type': 'string'},
'controller-uuid': {'type': 'string'},
'default-series': {'type': 'string'},
'life': {'type': 'string'},
'machines': {'items': {'$ref': '#/definitions/ModelMachineInfo'},
'type': 'array'},
'migration': {'$ref': '#/definitions/ModelMigrationStatus'},
'name': {'type': 'string'},
'owner-tag': {'type': 'string'},
'provider-type': {'type': 'string'},
'sla': {'$ref': '#/definitions/ModelSLAInfo'},
'status': {'$ref': '#/definitions/EntityStatus'},
'users': {'items': {'$ref': '#/definitions/ModelUserInfo'},
'type': 'array'},
'uuid': {'type': 'string'}},
'required': ['name',
'uuid',
'controller-uuid',
'cloud-tag',
'owner-tag',
'life',
'users',
'machines',
'sla',
'agent-version'],
'type': 'object'},
'ModelInfoResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'$ref': '#/definitions/ModelInfo'}},
'type': 'object'},
'ModelInfoResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ModelInfoResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ModelMachineInfo': {'additionalProperties': False,
'properties': {'hardware': {'$ref': '#/definitions/MachineHardware'},
'has-vote': {'type': 'boolean'},
'id': {'type': 'string'},
'instance-id': {'type': 'string'},
'status': {'type': 'string'},
'wants-vote': {'type': 'boolean'}},
'required': ['id'],
'type': 'object'},
'ModelMigrationStatus': {'additionalProperties': False,
'properties': {'end': {'format': 'date-time',
'type': 'string'},
'start': {'format': 'date-time',
'type': 'string'},
'status': {'type': 'string'}},
'required': ['status', 'start'],
'type': 'object'},
'ModelSLAInfo': {'additionalProperties': False,
'properties': {'level': {'type': 'string'},
'owner': {'type': 'string'}},
'required': ['level', 'owner'],
'type': 'object'},
'ModelStatus': {'additionalProperties': False,
'properties': {'application-count': {'type': 'integer'},
'hosted-machine-count': {'type': 'integer'},
'life': {'type': 'string'},
'machines': {'items': {'$ref': '#/definitions/ModelMachineInfo'},
'type': 'array'},
'model-tag': {'type': 'string'},
'owner-tag': {'type': 'string'}},
'required': ['model-tag',
'life',
'hosted-machine-count',
'application-count',
'owner-tag'],
'type': 'object'},
'ModelStatusResults': {'additionalProperties': False,
'properties': {'models': {'items': {'$ref': '#/definitions/ModelStatus'},
'type': 'array'}},
'required': ['models'],
'type': 'object'},
'ModelUnsetKeys': {'additionalProperties': False,
'properties': {'cloud-region': {'type': 'string'},
'cloud-tag': {'type': 'string'},
'keys': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['keys'],
'type': 'object'},
'ModelUserInfo': {'additionalProperties': False,
'properties': {'access': {'type': 'string'},
'display-name': {'type': 'string'},
'last-connection': {'format': 'date-time',
'type': 'string'},
'user': {'type': 'string'}},
'required': ['user',
'display-name',
'last-connection',
'access'],
'type': 'object'},
'ModifyModelAccess': {'additionalProperties': False,
'properties': {'access': {'type': 'string'},
'action': {'type': 'string'},
'model-tag': {'type': 'string'},
'user-tag': {'type': 'string'}},
'required': ['user-tag',
'action',
'access',
'model-tag'],
'type': 'object'},
'ModifyModelAccessRequest': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/ModifyModelAccess'},
'type': 'array'}},
'required': ['changes'],
'type': 'object'},
'Number': {'additionalProperties': False,
'properties': {'Build': {'type': 'integer'},
'Major': {'type': 'integer'},
'Minor': {'type': 'integer'},
'Patch': {'type': 'integer'},
'Tag': {'type': 'string'}},
'required': ['Major',
'Minor',
'Tag',
'Patch',
'Build'],
'type': 'object'},
'RegionDefaults': {'additionalProperties': False,
'properties': {'region-name': {'type': 'string'},
'value': {'additionalProperties': True,
'type': 'object'}},
'required': ['region-name', 'value'],
'type': 'object'},
'SetModelDefaults': {'additionalProperties': False,
'properties': {'config': {'items': {'$ref': '#/definitions/ModelDefaultValues'},
'type': 'array'}},
'required': ['config'],
'type': 'object'},
'UnsetModelDefaults': {'additionalProperties': False,
'properties': {'keys': {'items': {'$ref': '#/definitions/ModelUnsetKeys'},
'type': 'array'}},
'required': ['keys'],
'type': 'object'},
'UserModel': {'additionalProperties': False,
'properties': {'last-connection': {'format': 'date-time',
'type': 'string'},
'model': {'$ref': '#/definitions/Model'}},
'required': ['model', 'last-connection'],
'type': 'object'},
'UserModelList': {'additionalProperties': False,
'properties': {'user-models': {'items': {'$ref': '#/definitions/UserModel'},
'type': 'array'}},
'required': ['user-models'],
'type': 'object'}},
'properties': {'CreateModel': {'properties': {'Params': {'$ref': '#/definitions/ModelCreateArgs'},
'Result': {'$ref': '#/definitions/ModelInfo'}},
'type': 'object'},
'DestroyModels': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'DumpModels': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/MapResults'}},
'type': 'object'},
'DumpModelsDB': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/MapResults'}},
'type': 'object'},
'ListModels': {'properties': {'Params': {'$ref': '#/definitions/Entity'},
'Result': {'$ref': '#/definitions/UserModelList'}},
'type': 'object'},
'ModelDefaults': {'properties': {'Result': {'$ref': '#/definitions/ModelDefaultsResult'}},
'type': 'object'},
'ModelInfo': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ModelInfoResults'}},
'type': 'object'},
'ModelStatus': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ModelStatusResults'}},
'type': 'object'},
'ModifyModelAccess': {'properties': {'Params': {'$ref': '#/definitions/ModifyModelAccessRequest'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'SetModelDefaults': {'properties': {'Params': {'$ref': '#/definitions/SetModelDefaults'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'UnsetModelDefaults': {'properties': {'Params': {'$ref': '#/definitions/UnsetModelDefaults'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ModelInfo)
async def CreateModel(self, cloud_tag=None, config=None, credential=None, name=None, owner_tag=None, region=None):
'''
cloud_tag : str
config : typing.Mapping[str, typing.Any]
credential : str
name : str
owner_tag : str
region : str
Returns -> typing.Union[_ForwardRef('Number'), str, typing.Sequence[~ModelMachineInfo], _ForwardRef('ModelMigrationStatus'), _ForwardRef('ModelSLAInfo'), _ForwardRef('EntityStatus'), typing.Sequence[~ModelUserInfo]]
'''
if cloud_tag is not None and not isinstance(cloud_tag, (bytes, str)):
raise Exception("Expected cloud_tag to be a str, received: {}".format(type(cloud_tag)))
if config is not None and not isinstance(config, dict):
raise Exception("Expected config to be a Mapping, received: {}".format(type(config)))
if credential is not None and not isinstance(credential, (bytes, str)):
raise Exception("Expected credential to be a str, received: {}".format(type(credential)))
if name is not None and not isinstance(name, (bytes, str)):
raise Exception("Expected name to be a str, received: {}".format(type(name)))
if owner_tag is not None and not isinstance(owner_tag, (bytes, str)):
raise Exception("Expected owner_tag to be a str, received: {}".format(type(owner_tag)))
if region is not None and not isinstance(region, (bytes, str)):
raise Exception("Expected region to be a str, received: {}".format(type(region)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelManager',
request='CreateModel',
version=2,
params=_params)
_params['cloud-tag'] = cloud_tag
_params['config'] = config
_params['credential'] = credential
_params['name'] = name
_params['owner-tag'] = owner_tag
_params['region'] = region
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def DestroyModels(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelManager',
request='DestroyModels',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(MapResults)
async def DumpModels(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~MapResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelManager',
request='DumpModels',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(MapResults)
async def DumpModelsDB(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~MapResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelManager',
request='DumpModelsDB',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(UserModelList)
async def ListModels(self, tag=None):
'''
tag : str
Returns -> typing.Sequence[~UserModel]
'''
if tag is not None and not isinstance(tag, (bytes, str)):
raise Exception("Expected tag to be a str, received: {}".format(type(tag)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelManager',
request='ListModels',
version=2,
params=_params)
_params['tag'] = tag
reply = await self.rpc(msg)
return reply
@ReturnMapping(ModelDefaultsResult)
async def ModelDefaults(self):
'''
Returns -> typing.Mapping[str, ~ModelDefaults]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelManager',
request='ModelDefaults',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(ModelInfoResults)
async def ModelInfo(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ModelInfoResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelManager',
request='ModelInfo',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ModelStatusResults)
async def ModelStatus(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ModelStatus]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelManager',
request='ModelStatus',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def ModifyModelAccess(self, changes=None):
'''
changes : typing.Sequence[~ModifyModelAccess]
Returns -> typing.Sequence[~ErrorResult]
'''
if changes is not None and not isinstance(changes, (bytes, str, list)):
raise Exception("Expected changes to be a Sequence, received: {}".format(type(changes)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelManager',
request='ModifyModelAccess',
version=2,
params=_params)
_params['changes'] = changes
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def SetModelDefaults(self, config=None):
'''
config : typing.Sequence[~ModelDefaultValues]
Returns -> typing.Sequence[~ErrorResult]
'''
if config is not None and not isinstance(config, (bytes, str, list)):
raise Exception("Expected config to be a Sequence, received: {}".format(type(config)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelManager',
request='SetModelDefaults',
version=2,
params=_params)
_params['config'] = config
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def UnsetModelDefaults(self, keys=None):
'''
keys : typing.Sequence[~ModelUnsetKeys]
Returns -> typing.Sequence[~ErrorResult]
'''
if keys is not None and not isinstance(keys, (bytes, str, list)):
raise Exception("Expected keys to be a Sequence, received: {}".format(type(keys)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ModelManager',
request='UnsetModelDefaults',
version=2,
params=_params)
_params['keys'] = keys
reply = await self.rpc(msg)
return reply
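

# A minimal usage sketch for ModelManagerFacade: create a model, then list
# the models a user can see. The cloud, credential and user tags below are
# purely illustrative, and the same `connection`/from_connection()
# assumptions as above apply.
async def _example_model_manager(connection):
    facade = ModelManagerFacade.from_connection(connection)
    info = await facade.CreateModel(
        cloud_tag='cloud-localhost',
        config={},
        credential='cloudcred-localhost_admin_localhost',
        name='demo',
        owner_tag='user-admin',
        region='localhost')
    print('created', info.name, info.uuid)
    # ListModels takes a user tag and returns that user's models.
    reply = await facade.ListModels(tag='user-admin')
    for entry in reply.user_models:
        print(entry.model.name, entry.model.uuid)
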
class ProxyUpdaterFacade(Type):
name = 'ProxyUpdater'
version = 2
schema = {'definitions': {'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'NotifyWatchResult': {'additionalProperties': False,
'properties': {'NotifyWatcherId': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'required': ['NotifyWatcherId'],
'type': 'object'},
'NotifyWatchResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/NotifyWatchResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ProxyConfig': {'additionalProperties': False,
'properties': {'ftp': {'type': 'string'},
'http': {'type': 'string'},
'https': {'type': 'string'},
'no-proxy': {'type': 'string'}},
'required': ['http',
'https',
'ftp',
'no-proxy'],
'type': 'object'},
'ProxyConfigResult': {'additionalProperties': False,
'properties': {'apt-proxy-settings': {'$ref': '#/definitions/ProxyConfig'},
'error': {'$ref': '#/definitions/Error'},
'juju-proxy-settings': {'$ref': '#/definitions/ProxyConfig'},
'legacy-proxy-settings': {'$ref': '#/definitions/ProxyConfig'},
'snap-proxy-settings': {'$ref': '#/definitions/ProxyConfig'},
'snap-store-assertions': {'type': 'string'},
'snap-store-id': {'type': 'string'},
'snap-store-proxy-url': {'type': 'string'}},
'required': ['legacy-proxy-settings',
'juju-proxy-settings'],
'type': 'object'},
'ProxyConfigResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ProxyConfigResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'}},
'properties': {'ProxyConfig': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ProxyConfigResults'}},
'type': 'object'},
'WatchForProxyConfigAndAPIHostPortChanges': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/NotifyWatchResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ProxyConfigResults)
async def ProxyConfig(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ProxyConfigResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ProxyUpdater',
request='ProxyConfig',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResults)
async def WatchForProxyConfigAndAPIHostPortChanges(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~NotifyWatchResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='ProxyUpdater',
request='WatchForProxyConfigAndAPIHostPortChanges',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
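

# A minimal usage sketch for ProxyUpdaterFacade. Entity arguments are passed
# as plain dicts here on the assumption that they serialize like the
# generated definition objects; attribute names again assume the
# dash-to-underscore mapping.
async def _example_proxy_config(connection):
    facade = ProxyUpdaterFacade.from_connection(connection)
    reply = await facade.ProxyConfig(entities=[{'tag': 'machine-0'}])
    for item in reply.results:
        # Each result carries the legacy and juju proxy settings blocks.
        print(item.juju_proxy_settings.http, item.juju_proxy_settings.no_proxy)
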
class RebootFacade(Type):
name = 'Reboot'
version = 2
schema = {'definitions': {'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'NotifyWatchResult': {'additionalProperties': False,
'properties': {'NotifyWatcherId': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'required': ['NotifyWatcherId'],
'type': 'object'},
'RebootActionResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'type': 'string'}},
'type': 'object'},
'RebootActionResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/RebootActionResult'},
'type': 'array'}},
'type': 'object'}},
'properties': {'ClearReboot': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'GetRebootAction': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/RebootActionResults'}},
'type': 'object'},
'RequestReboot': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'WatchForRebootEvent': {'properties': {'Result': {'$ref': '#/definitions/NotifyWatchResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResults)
async def ClearReboot(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Reboot',
request='ClearReboot',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(RebootActionResults)
async def GetRebootAction(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~RebootActionResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Reboot',
request='GetRebootAction',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def RequestReboot(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Reboot',
request='RequestReboot',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResult)
async def WatchForRebootEvent(self):
'''
Returns -> typing.Union[str, _ForwardRef('Error')]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Reboot',
request='WatchForRebootEvent',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
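

# A minimal usage sketch for RebootFacade: flag a machine for reboot, then
# read back the action its agent should take. Same assumptions as the
# sketches above (dict-form Entity args, external `connection`).
async def _example_reboot(connection):
    facade = RebootFacade.from_connection(connection)
    await facade.RequestReboot(entities=[{'tag': 'machine-0'}])
    actions = await facade.GetRebootAction(entities=[{'tag': 'machine-0'}])
    for item in actions.results:
        # item.result is a string naming the pending action; item.error is
        # populated instead when the lookup failed.
        print(item.result)
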
class RemoteRelationsFacade(Type):
name = 'RemoteRelations'
version = 2
schema = {'definitions': {'ControllerAPIInfoResult': {'additionalProperties': False,
'properties': {'addresses': {'items': {'type': 'string'},
'type': 'array'},
'cacert': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'required': ['addresses',
'cacert'],
'type': 'object'},
'ControllerAPIInfoResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ControllerAPIInfoResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ControllerConfigResult': {'additionalProperties': False,
'properties': {'config': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'}},
'required': ['config'],
'type': 'object'},
'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'EntityMacaroonArg': {'additionalProperties': False,
'properties': {'macaroon': {'$ref': '#/definitions/Macaroon'},
'tag': {'type': 'string'}},
'required': ['macaroon', 'tag'],
'type': 'object'},
'EntityMacaroonArgs': {'additionalProperties': False,
'properties': {'Args': {'items': {'$ref': '#/definitions/EntityMacaroonArg'},
'type': 'array'}},
'required': ['Args'],
'type': 'object'},
'EntityStatusArgs': {'additionalProperties': False,
'properties': {'data': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'info': {'type': 'string'},
'status': {'type': 'string'},
'tag': {'type': 'string'}},
'required': ['tag',
'status',
'info',
'data'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ExternalControllerInfo': {'additionalProperties': False,
'properties': {'addrs': {'items': {'type': 'string'},
'type': 'array'},
'ca-cert': {'type': 'string'},
'controller-alias': {'type': 'string'},
'controller-tag': {'type': 'string'}},
'required': ['controller-tag',
'controller-alias',
'addrs',
'ca-cert'],
'type': 'object'},
'GetTokenArg': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'GetTokenArgs': {'additionalProperties': False,
'properties': {'Args': {'items': {'$ref': '#/definitions/GetTokenArg'},
'type': 'array'}},
'required': ['Args'],
'type': 'object'},
'Macaroon': {'additionalProperties': False, 'type': 'object'},
'RemoteApplication': {'additionalProperties': False,
'properties': {'is-consumer-proxy': {'type': 'boolean'},
'life': {'type': 'string'},
'macaroon': {'$ref': '#/definitions/Macaroon'},
'model-uuid': {'type': 'string'},
'name': {'type': 'string'},
'offer-uuid': {'type': 'string'},
'status': {'type': 'string'}},
'required': ['name',
'offer-uuid',
'model-uuid',
'is-consumer-proxy'],
'type': 'object'},
'RemoteApplicationResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'$ref': '#/definitions/RemoteApplication'}},
'type': 'object'},
'RemoteApplicationResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/RemoteApplicationResult'},
'type': 'array'}},
'type': 'object'},
'RemoteEndpoint': {'additionalProperties': False,
'properties': {'interface': {'type': 'string'},
'limit': {'type': 'integer'},
'name': {'type': 'string'},
'role': {'type': 'string'}},
'required': ['name',
'role',
'interface',
'limit'],
'type': 'object'},
'RemoteEntityTokenArg': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'},
'token': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'RemoteEntityTokenArgs': {'additionalProperties': False,
'properties': {'Args': {'items': {'$ref': '#/definitions/RemoteEntityTokenArg'},
'type': 'array'}},
'required': ['Args'],
'type': 'object'},
'RemoteRelation': {'additionalProperties': False,
'properties': {'application-name': {'type': 'string'},
'endpoint': {'$ref': '#/definitions/RemoteEndpoint'},
'id': {'type': 'integer'},
'key': {'type': 'string'},
'life': {'type': 'string'},
'remote-application-name': {'type': 'string'},
'remote-endpoint-name': {'type': 'string'},
'source-model-uuid': {'type': 'string'},
'suspended': {'type': 'boolean'}},
'required': ['life',
'suspended',
'id',
'key',
'application-name',
'endpoint',
'remote-application-name',
'remote-endpoint-name',
'source-model-uuid'],
'type': 'object'},
'RemoteRelationChangeEvent': {'additionalProperties': False,
'properties': {'application-settings': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'application-token': {'type': 'string'},
'bakery-version': {'type': 'integer'},
'changed-units': {'items': {'$ref': '#/definitions/RemoteRelationUnitChange'},
'type': 'array'},
'departed-units': {'items': {'type': 'integer'},
'type': 'array'},
'force-cleanup': {'type': 'boolean'},
'life': {'type': 'string'},
'macaroons': {'items': {'$ref': '#/definitions/Macaroon'},
'type': 'array'},
'relation-token': {'type': 'string'},
'suspended': {'type': 'boolean'},
'suspended-reason': {'type': 'string'}},
'required': ['relation-token',
'application-token',
'life'],
'type': 'object'},
'RemoteRelationResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'$ref': '#/definitions/RemoteRelation'}},
'type': 'object'},
'RemoteRelationResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/RemoteRelationResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'RemoteRelationUnitChange': {'additionalProperties': False,
'properties': {'settings': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'unit-id': {'type': 'integer'}},
'required': ['unit-id'],
'type': 'object'},
'RemoteRelationWatchResult': {'additionalProperties': False,
'properties': {'changes': {'$ref': '#/definitions/RemoteRelationChangeEvent'},
'error': {'$ref': '#/definitions/Error'},
'watcher-id': {'type': 'string'}},
'required': ['watcher-id',
'changes'],
'type': 'object'},
'RemoteRelationWatchResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/RemoteRelationWatchResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'RemoteRelationsChanges': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/RemoteRelationChangeEvent'},
'type': 'array'}},
'type': 'object'},
'SetStatus': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/EntityStatusArgs'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'StringResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'type': 'string'}},
'required': ['result'],
'type': 'object'},
'StringResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/StringResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'StringsWatchResult': {'additionalProperties': False,
'properties': {'changes': {'items': {'type': 'string'},
'type': 'array'},
'error': {'$ref': '#/definitions/Error'},
'watcher-id': {'type': 'string'}},
'required': ['watcher-id'],
'type': 'object'},
'StringsWatchResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/StringsWatchResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'TokenResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'token': {'type': 'string'}},
'type': 'object'},
'TokenResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/TokenResult'},
'type': 'array'}},
'type': 'object'},
'UpdateControllerForModel': {'additionalProperties': False,
'properties': {'info': {'$ref': '#/definitions/ExternalControllerInfo'},
'model-tag': {'type': 'string'}},
'required': ['model-tag', 'info'],
'type': 'object'},
'UpdateControllersForModelsParams': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/UpdateControllerForModel'},
'type': 'array'}},
'required': ['changes'],
'type': 'object'}},
'properties': {'ConsumeRemoteRelationChanges': {'properties': {'Params': {'$ref': '#/definitions/RemoteRelationsChanges'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'ControllerAPIInfoForModels': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ControllerAPIInfoResults'}},
'type': 'object'},
'ControllerConfig': {'properties': {'Result': {'$ref': '#/definitions/ControllerConfigResult'}},
'type': 'object'},
'ExportEntities': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/TokenResults'}},
'type': 'object'},
'GetTokens': {'properties': {'Params': {'$ref': '#/definitions/GetTokenArgs'},
'Result': {'$ref': '#/definitions/StringResults'}},
'type': 'object'},
'ImportRemoteEntities': {'properties': {'Params': {'$ref': '#/definitions/RemoteEntityTokenArgs'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'Relations': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/RemoteRelationResults'}},
'type': 'object'},
'RemoteApplications': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/RemoteApplicationResults'}},
'type': 'object'},
'SaveMacaroons': {'properties': {'Params': {'$ref': '#/definitions/EntityMacaroonArgs'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'SetRemoteApplicationsStatus': {'properties': {'Params': {'$ref': '#/definitions/SetStatus'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'UpdateControllersForModels': {'properties': {'Params': {'$ref': '#/definitions/UpdateControllersForModelsParams'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'WatchLocalRelationChanges': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/RemoteRelationWatchResults'}},
'type': 'object'},
'WatchRemoteApplicationRelations': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/StringsWatchResults'}},
'type': 'object'},
'WatchRemoteApplications': {'properties': {'Result': {'$ref': '#/definitions/StringsWatchResult'}},
'type': 'object'},
'WatchRemoteRelations': {'properties': {'Result': {'$ref': '#/definitions/StringsWatchResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResults)
async def ConsumeRemoteRelationChanges(self, changes=None):
'''
changes : typing.Sequence[~RemoteRelationChangeEvent]
Returns -> typing.Sequence[~ErrorResult]
'''
if changes is not None and not isinstance(changes, (bytes, str, list)):
raise Exception("Expected changes to be a Sequence, received: {}".format(type(changes)))
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='ConsumeRemoteRelationChanges',
version=2,
params=_params)
_params['changes'] = changes
reply = await self.rpc(msg)
return reply
@ReturnMapping(ControllerAPIInfoResults)
async def ControllerAPIInfoForModels(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ControllerAPIInfoResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='ControllerAPIInfoForModels',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ControllerConfigResult)
async def ControllerConfig(self):
'''
Returns -> typing.Mapping[str, typing.Any]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='ControllerConfig',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(TokenResults)
async def ExportEntities(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~TokenResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='ExportEntities',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(StringResults)
async def GetTokens(self, args=None):
'''
args : typing.Sequence[~GetTokenArg]
Returns -> typing.Sequence[~StringResult]
'''
if args is not None and not isinstance(args, (bytes, str, list)):
raise Exception("Expected args to be a Sequence, received: {}".format(type(args)))
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='GetTokens',
version=2,
params=_params)
_params['Args'] = args
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def ImportRemoteEntities(self, args=None):
'''
args : typing.Sequence[~RemoteEntityTokenArg]
Returns -> typing.Sequence[~ErrorResult]
'''
if args is not None and not isinstance(args, (bytes, str, list)):
raise Exception("Expected args to be a Sequence, received: {}".format(type(args)))
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='ImportRemoteEntities',
version=2,
params=_params)
_params['Args'] = args
reply = await self.rpc(msg)
return reply
@ReturnMapping(RemoteRelationResults)
async def Relations(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~RemoteRelationResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='Relations',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(RemoteApplicationResults)
async def RemoteApplications(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~RemoteApplicationResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='RemoteApplications',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def SaveMacaroons(self, args=None):
'''
args : typing.Sequence[~EntityMacaroonArg]
Returns -> typing.Sequence[~ErrorResult]
'''
if args is not None and not isinstance(args, (bytes, str, list)):
raise Exception("Expected args to be a Sequence, received: {}".format(type(args)))
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='SaveMacaroons',
version=2,
params=_params)
_params['Args'] = args
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def SetRemoteApplicationsStatus(self, entities=None):
'''
entities : typing.Sequence[~EntityStatusArgs]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='SetRemoteApplicationsStatus',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def UpdateControllersForModels(self, changes=None):
'''
changes : typing.Sequence[~UpdateControllerForModel]
Returns -> typing.Sequence[~ErrorResult]
'''
if changes is not None and not isinstance(changes, (bytes, str, list)):
raise Exception("Expected changes to be a Sequence, received: {}".format(type(changes)))
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='UpdateControllersForModels',
version=2,
params=_params)
_params['changes'] = changes
reply = await self.rpc(msg)
return reply
@ReturnMapping(RemoteRelationWatchResults)
async def WatchLocalRelationChanges(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~RemoteRelationWatchResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='WatchLocalRelationChanges',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(StringsWatchResults)
async def WatchRemoteApplicationRelations(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~StringsWatchResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='WatchRemoteApplicationRelations',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(StringsWatchResult)
async def WatchRemoteApplications(self):
'''
Returns -> typing.Union[typing.Sequence[str], _ForwardRef('Error'), str]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='WatchRemoteApplications',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(StringsWatchResult)
async def WatchRemoteRelations(self):
'''
Returns -> typing.Union[typing.Sequence[str], _ForwardRef('Error'), str]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='RemoteRelations',
request='WatchRemoteRelations',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
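# Illustrative usage sketch (not part of the generated API): calling one of
# the RemoteRelationsFacade methods above from a coroutine. `facade` is
# assumed to be an already-connected RemoteRelationsFacade, and the entity
# argument is shown as a plain dict matching the Entities schema; the
# application tag is hypothetical.
async def _example_remote_applications(facade):
    """Fetch RemoteApplicationResults for one hypothetical application."""
    return await facade.RemoteApplications(
        entities=[{'tag': 'application-mysql'}])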
class ResumerFacade(Type):
name = 'Resumer'
version = 2
schema = {'properties': {'ResumeTransactions': {'type': 'object'}}, 'type': 'object'}
@ReturnMapping(None)
async def ResumeTransactions(self):
'''
Returns -> None
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Resumer',
request='ResumeTransactions',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
class SSHClientFacade(Type):
name = 'SSHClient'
version = 2
schema = {'definitions': {'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'SSHAddressResult': {'additionalProperties': False,
'properties': {'address': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'SSHAddressResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/SSHAddressResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'SSHAddressesResult': {'additionalProperties': False,
'properties': {'addresses': {'items': {'type': 'string'},
'type': 'array'},
'error': {'$ref': '#/definitions/Error'}},
'required': ['addresses'],
'type': 'object'},
'SSHAddressesResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/SSHAddressesResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'SSHProxyResult': {'additionalProperties': False,
'properties': {'use-proxy': {'type': 'boolean'}},
'required': ['use-proxy'],
'type': 'object'},
'SSHPublicKeysResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'public-keys': {'items': {'type': 'string'},
'type': 'array'}},
'type': 'object'},
'SSHPublicKeysResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/SSHPublicKeysResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'}},
'properties': {'AllAddresses': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/SSHAddressesResults'}},
'type': 'object'},
'PrivateAddress': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/SSHAddressResults'}},
'type': 'object'},
'Proxy': {'properties': {'Result': {'$ref': '#/definitions/SSHProxyResult'}},
'type': 'object'},
'PublicAddress': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/SSHAddressResults'}},
'type': 'object'},
'PublicKeys': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/SSHPublicKeysResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(SSHAddressesResults)
async def AllAddresses(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~SSHAddressesResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='SSHClient',
request='AllAddresses',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(SSHAddressResults)
async def PrivateAddress(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~SSHAddressResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='SSHClient',
request='PrivateAddress',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(SSHProxyResult)
async def Proxy(self):
'''
Returns -> bool
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='SSHClient',
request='Proxy',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(SSHAddressResults)
async def PublicAddress(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~SSHAddressResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='SSHClient',
request='PublicAddress',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(SSHPublicKeysResults)
async def PublicKeys(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~SSHPublicKeysResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='SSHClient',
request='PublicKeys',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
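# Illustrative usage sketch: resolving SSH addresses with the SSHClientFacade
# above. `facade` is assumed to be already connected; the unit tag is
# hypothetical and entities are shown as plain dicts matching the Entities
# schema.
async def _example_ssh_public_address(facade):
    """Return SSHAddressResults for one hypothetical unit."""
    return await facade.PublicAddress(entities=[{'tag': 'unit-mysql-0'}])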
class SingularFacade(Type):
name = 'Singular'
version = 2
schema = {'definitions': {'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'SingularClaim': {'additionalProperties': False,
'properties': {'claimant-tag': {'type': 'string'},
'duration': {'type': 'integer'},
'entity-tag': {'type': 'string'}},
'required': ['entity-tag',
'claimant-tag',
'duration'],
'type': 'object'},
'SingularClaims': {'additionalProperties': False,
'properties': {'claims': {'items': {'$ref': '#/definitions/SingularClaim'},
'type': 'array'}},
'required': ['claims'],
'type': 'object'}},
'properties': {'Claim': {'properties': {'Params': {'$ref': '#/definitions/SingularClaims'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'Wait': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResults)
async def Claim(self, claims=None):
'''
claims : typing.Sequence[~SingularClaim]
Returns -> typing.Sequence[~ErrorResult]
'''
if claims is not None and not isinstance(claims, (bytes, str, list)):
raise Exception("Expected claims to be a Sequence, received: {}".format(type(claims)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Singular',
request='Claim',
version=2,
params=_params)
_params['claims'] = claims
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def Wait(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Singular',
request='Wait',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
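# Illustrative usage sketch: submitting a lease claim via the SingularFacade
# above. The dict mirrors the SingularClaim schema (entity-tag, claimant-tag,
# duration are required); the tags are hypothetical, and the unit of
# `duration` is not documented in this module, so it is left to the caller.
async def _example_singular_claim(facade, duration):
    """Submit one SingularClaim; returns ErrorResults."""
    return await facade.Claim(claims=[{'entity-tag': 'model-deadbeef',
                                       'claimant-tag': 'machine-0',
                                       'duration': duration}])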
class SpacesFacade(Type):
name = 'Spaces'
version = 2
schema = {'definitions': {'CreateSpaceParams': {'additionalProperties': False,
'properties': {'provider-id': {'type': 'string'},
'public': {'type': 'boolean'},
'space-tag': {'type': 'string'},
'subnet-tags': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['subnet-tags',
'space-tag',
'public'],
'type': 'object'},
'CreateSpacesParams': {'additionalProperties': False,
'properties': {'spaces': {'items': {'$ref': '#/definitions/CreateSpaceParams'},
'type': 'array'}},
'required': ['spaces'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'$ref': '#/definitions/ErrorInfo'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorInfo': {'additionalProperties': False,
'properties': {'macaroon': {'$ref': '#/definitions/Macaroon'},
'macaroon-path': {'type': 'string'}},
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ListSpacesResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/Space'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'Macaroon': {'additionalProperties': False, 'type': 'object'},
'Space': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'name': {'type': 'string'},
'subnets': {'items': {'$ref': '#/definitions/Subnet'},
'type': 'array'}},
'required': ['name', 'subnets'],
'type': 'object'},
'Subnet': {'additionalProperties': False,
'properties': {'cidr': {'type': 'string'},
'life': {'type': 'string'},
'provider-id': {'type': 'string'},
'provider-network-id': {'type': 'string'},
'provider-space-id': {'type': 'string'},
'space-tag': {'type': 'string'},
'status': {'type': 'string'},
'vlan-tag': {'type': 'integer'},
'zones': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['cidr',
'vlan-tag',
'life',
'space-tag',
'zones'],
'type': 'object'}},
'properties': {'CreateSpaces': {'properties': {'Params': {'$ref': '#/definitions/CreateSpacesParams'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'ListSpaces': {'properties': {'Result': {'$ref': '#/definitions/ListSpacesResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResults)
async def CreateSpaces(self, spaces=None):
'''
spaces : typing.Sequence[~CreateSpaceParams]
Returns -> typing.Sequence[~ErrorResult]
'''
if spaces is not None and not isinstance(spaces, (bytes, str, list)):
raise Exception("Expected spaces to be a Sequence, received: {}".format(type(spaces)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Spaces',
request='CreateSpaces',
version=2,
params=_params)
_params['spaces'] = spaces
reply = await self.rpc(msg)
return reply
@ReturnMapping(ListSpacesResults)
async def ListSpaces(self):
'''
Returns -> typing.Sequence[~Space]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Spaces',
request='ListSpaces',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
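# Illustrative usage sketch: creating a network space with the SpacesFacade
# above. The dict mirrors CreateSpaceParams (subnet-tags, space-tag and
# public are required by the schema); all tag values are hypothetical.
async def _example_create_space(facade):
    """Create one space spanning a hypothetical subnet."""
    return await facade.CreateSpaces(spaces=[{
        'space-tag': 'space-internal',
        'subnet-tags': ['subnet-10.0.0.0-24'],
        'public': False}])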
class StatusHistoryFacade(Type):
name = 'StatusHistory'
version = 2
schema = {'definitions': {'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ModelConfigResult': {'additionalProperties': False,
'properties': {'config': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'}},
'required': ['config'],
'type': 'object'},
'NotifyWatchResult': {'additionalProperties': False,
'properties': {'NotifyWatcherId': {'type': 'string'},
'error': {'$ref': '#/definitions/Error'}},
'required': ['NotifyWatcherId'],
'type': 'object'},
'StatusHistoryPruneArgs': {'additionalProperties': False,
'properties': {'max-history-mb': {'type': 'integer'},
'max-history-time': {'type': 'integer'}},
'required': ['max-history-time',
'max-history-mb'],
'type': 'object'}},
'properties': {'ModelConfig': {'properties': {'Result': {'$ref': '#/definitions/ModelConfigResult'}},
'type': 'object'},
'Prune': {'properties': {'Params': {'$ref': '#/definitions/StatusHistoryPruneArgs'}},
'type': 'object'},
'WatchForModelConfigChanges': {'properties': {'Result': {'$ref': '#/definitions/NotifyWatchResult'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ModelConfigResult)
async def ModelConfig(self):
'''
Returns -> typing.Mapping[str, typing.Any]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='StatusHistory',
request='ModelConfig',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Prune(self, max_history_mb=None, max_history_time=None):
'''
max_history_mb : int
max_history_time : int
Returns -> None
'''
if max_history_mb is not None and not isinstance(max_history_mb, int):
raise Exception("Expected max_history_mb to be a int, received: {}".format(type(max_history_mb)))
if max_history_time is not None and not isinstance(max_history_time, int):
raise Exception("Expected max_history_time to be a int, received: {}".format(type(max_history_time)))
# map input types to rpc msg
_params = dict()
msg = dict(type='StatusHistory',
request='Prune',
version=2,
params=_params)
_params['max-history-mb'] = max_history_mb
_params['max-history-time'] = max_history_time
reply = await self.rpc(msg)
return reply
@ReturnMapping(NotifyWatchResult)
async def WatchForModelConfigChanges(self):
'''
Returns -> typing.Union[str, _ForwardRef('Error')]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='StatusHistory',
request='WatchForModelConfigChanges',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
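# Illustrative usage sketch: pruning status history with the facade above.
# Both limits are integers per StatusHistoryPruneArgs; the unit of
# max-history-time is not documented in this module, so it is left to the
# caller here.
async def _example_prune_status_history(facade, max_history_time):
    """Prune status history to at most 1024 MB and the given time limit."""
    return await facade.Prune(max_history_mb=1024,
                              max_history_time=max_history_time)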
class SubnetsFacade(Type):
name = 'Subnets'
version = 2
schema = {'definitions': {'AddSubnetParams': {'additionalProperties': False,
'properties': {'provider-network-id': {'type': 'string'},
'space-tag': {'type': 'string'},
'subnet-provider-id': {'type': 'string'},
'subnet-tag': {'type': 'string'},
'vlan-tag': {'type': 'integer'},
'zones': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['space-tag'],
'type': 'object'},
'AddSubnetsParams': {'additionalProperties': False,
'properties': {'subnets': {'items': {'$ref': '#/definitions/AddSubnetParams'},
'type': 'array'}},
'required': ['subnets'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'ListSubnetsResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/Subnet'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'SpaceResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'SpaceResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/SpaceResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'Subnet': {'additionalProperties': False,
'properties': {'cidr': {'type': 'string'},
'life': {'type': 'string'},
'provider-id': {'type': 'string'},
'provider-network-id': {'type': 'string'},
'provider-space-id': {'type': 'string'},
'space-tag': {'type': 'string'},
'status': {'type': 'string'},
'vlan-tag': {'type': 'integer'},
'zones': {'items': {'type': 'string'},
'type': 'array'}},
'required': ['cidr',
'vlan-tag',
'life',
'space-tag',
'zones'],
'type': 'object'},
'SubnetsFilters': {'additionalProperties': False,
'properties': {'space-tag': {'type': 'string'},
'zone': {'type': 'string'}},
'type': 'object'},
'ZoneResult': {'additionalProperties': False,
'properties': {'available': {'type': 'boolean'},
'error': {'$ref': '#/definitions/Error'},
'name': {'type': 'string'}},
'required': ['name', 'available'],
'type': 'object'},
'ZoneResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ZoneResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'}},
'properties': {'AddSubnets': {'properties': {'Params': {'$ref': '#/definitions/AddSubnetsParams'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'AllSpaces': {'properties': {'Result': {'$ref': '#/definitions/SpaceResults'}},
'type': 'object'},
'AllZones': {'properties': {'Result': {'$ref': '#/definitions/ZoneResults'}},
'type': 'object'},
'ListSubnets': {'properties': {'Params': {'$ref': '#/definitions/SubnetsFilters'},
'Result': {'$ref': '#/definitions/ListSubnetsResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(ErrorResults)
async def AddSubnets(self, subnets=None):
'''
subnets : typing.Sequence[~AddSubnetParams]
Returns -> typing.Sequence[~ErrorResult]
'''
if subnets is not None and not isinstance(subnets, (bytes, str, list)):
raise Exception("Expected subnets to be a Sequence, received: {}".format(type(subnets)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Subnets',
request='AddSubnets',
version=2,
params=_params)
_params['subnets'] = subnets
reply = await self.rpc(msg)
return reply
@ReturnMapping(SpaceResults)
async def AllSpaces(self):
'''
Returns -> typing.Sequence[~SpaceResult]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Subnets',
request='AllSpaces',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(ZoneResults)
async def AllZones(self):
'''
Returns -> typing.Sequence[~ZoneResult]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='Subnets',
request='AllZones',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(ListSubnetsResults)
async def ListSubnets(self, space_tag=None, zone=None):
'''
space_tag : str
zone : str
Returns -> typing.Sequence[~Subnet]
'''
if space_tag is not None and not isinstance(space_tag, (bytes, str)):
raise Exception("Expected space_tag to be a str, received: {}".format(type(space_tag)))
if zone is not None and not isinstance(zone, (bytes, str)):
raise Exception("Expected zone to be a str, received: {}".format(type(zone)))
# map input types to rpc msg
_params = dict()
msg = dict(type='Subnets',
request='ListSubnets',
version=2,
params=_params)
_params['space-tag'] = space_tag
_params['zone'] = zone
reply = await self.rpc(msg)
return reply
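# Illustrative usage sketch: listing subnets through the SubnetsFacade above,
# filtered per the SubnetsFilters schema. Both filter values are
# hypothetical, and either may be passed as None to skip that filter.
async def _example_list_subnets(facade):
    """Return ListSubnetsResults for a hypothetical space and zone."""
    return await facade.ListSubnets(space_tag='space-internal', zone='zone-a')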
class UserManagerFacade(Type):
name = 'UserManager'
version = 2
schema = {'definitions': {'AddUser': {'additionalProperties': False,
'properties': {'display-name': {'type': 'string'},
'password': {'type': 'string'},
'username': {'type': 'string'}},
'required': ['username', 'display-name'],
'type': 'object'},
'AddUserResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'secret-key': {'items': {'type': 'integer'},
'type': 'array'},
'tag': {'type': 'string'}},
'type': 'object'},
'AddUserResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/AddUserResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'AddUsers': {'additionalProperties': False,
'properties': {'users': {'items': {'$ref': '#/definitions/AddUser'},
'type': 'array'}},
'required': ['users'],
'type': 'object'},
'Entities': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'}},
'required': ['entities'],
'type': 'object'},
'Entity': {'additionalProperties': False,
'properties': {'tag': {'type': 'string'}},
'required': ['tag'],
'type': 'object'},
'EntityPassword': {'additionalProperties': False,
'properties': {'password': {'type': 'string'},
'tag': {'type': 'string'}},
'required': ['tag', 'password'],
'type': 'object'},
'EntityPasswords': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/EntityPassword'},
'type': 'array'}},
'required': ['changes'],
'type': 'object'},
'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'ErrorResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'}},
'type': 'object'},
'ErrorResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/ErrorResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'},
'UserInfo': {'additionalProperties': False,
'properties': {'access': {'type': 'string'},
'created-by': {'type': 'string'},
'date-created': {'format': 'date-time',
'type': 'string'},
'disabled': {'type': 'boolean'},
'display-name': {'type': 'string'},
'last-connection': {'format': 'date-time',
'type': 'string'},
'username': {'type': 'string'}},
'required': ['username',
'display-name',
'access',
'created-by',
'date-created',
'disabled'],
'type': 'object'},
'UserInfoRequest': {'additionalProperties': False,
'properties': {'entities': {'items': {'$ref': '#/definitions/Entity'},
'type': 'array'},
'include-disabled': {'type': 'boolean'}},
'required': ['entities',
'include-disabled'],
'type': 'object'},
'UserInfoResult': {'additionalProperties': False,
'properties': {'error': {'$ref': '#/definitions/Error'},
'result': {'$ref': '#/definitions/UserInfo'}},
'type': 'object'},
'UserInfoResults': {'additionalProperties': False,
'properties': {'results': {'items': {'$ref': '#/definitions/UserInfoResult'},
'type': 'array'}},
'required': ['results'],
'type': 'object'}},
'properties': {'AddUser': {'properties': {'Params': {'$ref': '#/definitions/AddUsers'},
'Result': {'$ref': '#/definitions/AddUserResults'}},
'type': 'object'},
'DisableUser': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'EnableUser': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'RemoveUser': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'ResetPassword': {'properties': {'Params': {'$ref': '#/definitions/Entities'},
'Result': {'$ref': '#/definitions/AddUserResults'}},
'type': 'object'},
'SetPassword': {'properties': {'Params': {'$ref': '#/definitions/EntityPasswords'},
'Result': {'$ref': '#/definitions/ErrorResults'}},
'type': 'object'},
'UserInfo': {'properties': {'Params': {'$ref': '#/definitions/UserInfoRequest'},
'Result': {'$ref': '#/definitions/UserInfoResults'}},
'type': 'object'}},
'type': 'object'}
@ReturnMapping(AddUserResults)
async def AddUser(self, users=None):
'''
users : typing.Sequence[~AddUser]
Returns -> typing.Sequence[~AddUserResult]
'''
if users is not None and not isinstance(users, (bytes, str, list)):
raise Exception("Expected users to be a Sequence, received: {}".format(type(users)))
# map input types to rpc msg
_params = dict()
msg = dict(type='UserManager',
request='AddUser',
version=2,
params=_params)
_params['users'] = users
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def DisableUser(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='UserManager',
request='DisableUser',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def EnableUser(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='UserManager',
request='EnableUser',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def RemoveUser(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~ErrorResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='UserManager',
request='RemoveUser',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(AddUserResults)
async def ResetPassword(self, entities=None):
'''
entities : typing.Sequence[~Entity]
Returns -> typing.Sequence[~AddUserResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
# map input types to rpc msg
_params = dict()
msg = dict(type='UserManager',
request='ResetPassword',
version=2,
params=_params)
_params['entities'] = entities
reply = await self.rpc(msg)
return reply
@ReturnMapping(ErrorResults)
async def SetPassword(self, changes=None):
'''
changes : typing.Sequence[~EntityPassword]
Returns -> typing.Sequence[~ErrorResult]
'''
if changes is not None and not isinstance(changes, (bytes, str, list)):
raise Exception("Expected changes to be a Sequence, received: {}".format(type(changes)))
# map input types to rpc msg
_params = dict()
msg = dict(type='UserManager',
request='SetPassword',
version=2,
params=_params)
_params['changes'] = changes
reply = await self.rpc(msg)
return reply
@ReturnMapping(UserInfoResults)
async def UserInfo(self, entities=None, include_disabled=None):
'''
entities : typing.Sequence[~Entity]
include_disabled : bool
Returns -> typing.Sequence[~UserInfoResult]
'''
if entities is not None and not isinstance(entities, (bytes, str, list)):
raise Exception("Expected entities to be a Sequence, received: {}".format(type(entities)))
if include_disabled is not None and not isinstance(include_disabled, bool):
raise Exception("Expected include_disabled to be a bool, received: {}".format(type(include_disabled)))
# map input types to rpc msg
_params = dict()
msg = dict(type='UserManager',
request='UserInfo',
version=2,
params=_params)
_params['entities'] = entities
_params['include-disabled'] = include_disabled
reply = await self.rpc(msg)
return reply
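# Illustrative usage sketch: creating a user with the UserManagerFacade
# above. The dict mirrors the AddUser schema (username and display-name are
# required, password is optional); the values are hypothetical.
async def _example_add_user(facade):
    """Add one user; returns AddUserResults (tag and, if set, a secret-key)."""
    return await facade.AddUser(users=[{'username': 'alice',
                                        'display-name': 'Alice Example'}])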
class VolumeAttachmentsWatcherFacade(Type):
name = 'VolumeAttachmentsWatcher'
version = 2
schema = {'definitions': {'Error': {'additionalProperties': False,
'properties': {'code': {'type': 'string'},
'info': {'patternProperties': {'.*': {'additionalProperties': True,
'type': 'object'}},
'type': 'object'},
'message': {'type': 'string'}},
'required': ['message', 'code'],
'type': 'object'},
'MachineStorageId': {'additionalProperties': False,
'properties': {'attachment-tag': {'type': 'string'},
'machine-tag': {'type': 'string'}},
'required': ['machine-tag',
'attachment-tag'],
'type': 'object'},
'MachineStorageIdsWatchResult': {'additionalProperties': False,
'properties': {'changes': {'items': {'$ref': '#/definitions/MachineStorageId'},
'type': 'array'},
'error': {'$ref': '#/definitions/Error'},
'watcher-id': {'type': 'string'}},
'required': ['watcher-id',
'changes'],
'type': 'object'}},
'properties': {'Next': {'properties': {'Result': {'$ref': '#/definitions/MachineStorageIdsWatchResult'}},
'type': 'object'},
'Stop': {'type': 'object'}},
'type': 'object'}
@ReturnMapping(MachineStorageIdsWatchResult)
async def Next(self):
'''
Returns -> typing.Union[typing.Sequence[~MachineStorageId], _ForwardRef('Error'), str]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='VolumeAttachmentsWatcher',
request='Next',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
@ReturnMapping(None)
async def Stop(self):
'''
Returns -> None
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='VolumeAttachmentsWatcher',
request='Stop',
version=2,
params=_params)
reply = await self.rpc(msg)
return reply
| 55.227892
| 309
| 0.3673
|
7354451ae75cb5f78069db39dec6d86d7f0c67a5
| 1,940
|
py
|
Python
|
disentanglement_lib/methods/shared/optimizers.py
|
travers-rhodes/disentanglement_lib
|
73d4b995e88efdd5ffbe98a72e48a620c58f4dc7
|
[
"Apache-2.0"
] | 1,280
|
2019-02-09T20:27:37.000Z
|
2022-03-29T18:02:23.000Z
|
disentanglement_lib/methods/shared/optimizers.py
|
travers-rhodes/disentanglement_lib
|
73d4b995e88efdd5ffbe98a72e48a620c58f4dc7
|
[
"Apache-2.0"
] | 34
|
2019-02-14T23:18:01.000Z
|
2021-11-23T00:55:54.000Z
|
disentanglement_lib/methods/shared/optimizers.py
|
travers-rhodes/disentanglement_lib
|
73d4b995e88efdd5ffbe98a72e48a620c58f4dc7
|
[
"Apache-2.0"
] | 199
|
2019-02-13T17:52:00.000Z
|
2022-03-11T17:37:26.000Z
|
# coding=utf-8
# Copyright 2018 The DisentanglementLib Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library of commonly used optimizers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
import gin.tf
def make_optimizer(optimizer_fn, learning_rate):
"""Wrapper to create the optimizer with a given learning_rate."""
if learning_rate is None:
# Learning rate is specified in the optimizer_fn options, or left to its
# default value.
return optimizer_fn()
else:
# Learning rate is explicitly specified in vae/discriminator optimizer.
# If it is callable, we assume it's a LR decay function which needs the
# current global step.
if callable(learning_rate):
learning_rate = learning_rate(global_step=tf.train.get_global_step())
return optimizer_fn(learning_rate=learning_rate)
@gin.configurable("vae_optimizer")
def make_vae_optimizer(optimizer_fn=gin.REQUIRED, learning_rate=None):
"""Wrapper that uses gin to construct an optimizer for VAEs."""
return make_optimizer(optimizer_fn, learning_rate)
@gin.configurable("discriminator_optimizer")
def make_discriminator_optimizer(optimizer_fn=gin.REQUIRED, learning_rate=None):
"""Wrapper that uses gin to construct an optimizer for the discriminator."""
return make_optimizer(optimizer_fn, learning_rate)
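# Hedged usage sketch: with gin, an experiment config would typically bind
# the constructor (and optionally a learning rate) for the two configurables
# above. The bindings below are illustrative, not taken from the library's
# shipped configs:
#
#   vae_optimizer.optimizer_fn = @tf.train.AdamOptimizer
#   vae_optimizer.learning_rate = 1e-4
#   discriminator_optimizer.optimizer_fn = @tf.train.AdamOptimizer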
| 38.8
| 80
| 0.776804
|
7dd17567fc31ccac2ebeb8c3dfc64c7b4a29a471
| 307
|
py
|
Python
|
django_referrer_policy/tests/urls.py
|
adamchainz/django-referrer-policy
|
507a83826a4e3f65f05332c72251ebdedd49f5bf
|
[
"BSD-3-Clause"
] | 33
|
2018-02-12T03:41:09.000Z
|
2022-01-07T13:54:03.000Z
|
django_referrer_policy/tests/urls.py
|
adamchainz/django-referrer-policy
|
507a83826a4e3f65f05332c72251ebdedd49f5bf
|
[
"BSD-3-Clause"
] | 2
|
2018-07-06T06:07:24.000Z
|
2019-08-11T09:02:10.000Z
|
django_referrer_policy/tests/urls.py
|
adamchainz/django-referrer-policy
|
507a83826a4e3f65f05332c72251ebdedd49f5bf
|
[
"BSD-3-Clause"
] | 4
|
2018-10-09T23:16:17.000Z
|
2019-07-16T12:08:50.000Z
|
from django.conf.urls import url
from django.http import HttpResponse
def view(request):
"""
A minimal view for use in testing.
"""
return HttpResponse('Content.')
urlpatterns = [
url(r'^referrer-policy-middleware$',
view,
name='test-referrer-policy-middleware'),
]
| 17.055556
| 48
| 0.651466
|
01c071a2f63472929db5d0ba18742f1fc40c29a9
| 3,494
|
py
|
Python
|
pypureclient/flasharray/FA_2_4/models/time_window.py
|
Flav-STOR-WL/py-pure-client
|
03b889c997d90380ac5d6380ca5d5432792d3e89
|
[
"BSD-2-Clause"
] | 14
|
2018-12-07T18:30:27.000Z
|
2022-02-22T09:12:33.000Z
|
pypureclient/flasharray/FA_2_4/models/time_window.py
|
Flav-STOR-WL/py-pure-client
|
03b889c997d90380ac5d6380ca5d5432792d3e89
|
[
"BSD-2-Clause"
] | 28
|
2019-09-17T21:03:52.000Z
|
2022-03-29T22:07:35.000Z
|
pypureclient/flasharray/FA_2_4/models/time_window.py
|
Flav-STOR-WL/py-pure-client
|
03b889c997d90380ac5d6380ca5d5432792d3e89
|
[
"BSD-2-Clause"
] | 15
|
2020-06-11T15:50:08.000Z
|
2022-03-21T09:27:25.000Z
|
# coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_4 import models
class TimeWindow(object):
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'start': 'int',
'end': 'int'
}
attribute_map = {
'start': 'start',
'end': 'end'
}
required_args = {
}
def __init__(
self,
start=None, # type: int
end=None, # type: int
):
"""
Keyword args:
start (int): The window start time. Measured in milliseconds since midnight. The time must be set on the hour. (e.g., `18000000`, which is equal to 5:00 AM).
end (int): The window end time. Measured in milliseconds since midnight. The time must be set on the hour. (e.g., `28800000`, which is equal to 8:00 AM).
"""
if start is not None:
self.start = start
if end is not None:
self.end = end
def __setattr__(self, key, value):
if key not in self.attribute_map:
raise KeyError("Invalid key `{}` for `TimeWindow`".format(key))
self.__dict__[key] = value
def __getattribute__(self, item):
value = object.__getattribute__(self, item)
if isinstance(value, Property):
raise AttributeError
else:
return value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
if hasattr(self, attr):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(TimeWindow, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TimeWindow):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
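# Hedged usage sketch (not part of the generated model): building the
# 5:00 AM-8:00 AM window documented in __init__ above. Times are
# milliseconds since midnight and must fall on the hour.
def _example_time_window():
    """Return the documented 18000000/28800000 window."""
    return TimeWindow(start=5 * 3600 * 1000, end=8 * 3600 * 1000)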
| 29.610169
| 169
| 0.544362
|
ccaa6d65ffe8284f14b7293adeca6609fcb17bc5
| 546
|
py
|
Python
|
gdsfactory/tests/test_serialize.py
|
simbilod/gdsfactory
|
4d76db32674c3edb4d16260e3177ee29ef9ce11d
|
[
"MIT"
] | null | null | null |
gdsfactory/tests/test_serialize.py
|
simbilod/gdsfactory
|
4d76db32674c3edb4d16260e3177ee29ef9ce11d
|
[
"MIT"
] | null | null | null |
gdsfactory/tests/test_serialize.py
|
simbilod/gdsfactory
|
4d76db32674c3edb4d16260e3177ee29ef9ce11d
|
[
"MIT"
] | null | null | null |
import gdsfactory as gf
from gdsfactory.cross_section import strip
@gf.cell
def demo_cross_section_setting(cross_section=strip()) -> gf.Component:
return gf.components.straight(cross_section=cross_section)
def test_settings(data_regression, check: bool = True) -> None:
"""Avoid regressions when exporting settings."""
component = demo_cross_section_setting()
data_regression.check(component.to_dict())
if __name__ == "__main__":
c = demo_cross_section_setting()
d = c.to_dict()
# c.show()
# test_settings()
| 26
| 70
| 0.736264
|
d760a8437b362b7728f0c33bdf1ec9b14c091911
| 1,151
|
py
|
Python
|
tag_resolver_proxy/resolve_tags/quay_io.py
|
verygood-ops/kritis-reverse-proxy
|
10a635d52132f1e5529cea1f39a3c8ae0fc43ece
|
[
"MIT"
] | 1
|
2020-06-24T22:20:26.000Z
|
2020-06-24T22:20:26.000Z
|
tag_resolver_proxy/resolve_tags/quay_io.py
|
verygood-ops/kritis-reverse-proxy
|
10a635d52132f1e5529cea1f39a3c8ae0fc43ece
|
[
"MIT"
] | null | null | null |
tag_resolver_proxy/resolve_tags/quay_io.py
|
verygood-ops/kritis-reverse-proxy
|
10a635d52132f1e5529cea1f39a3c8ae0fc43ece
|
[
"MIT"
] | null | null | null |
from tag_resolver_proxy.resolve_tags import base
from tag_resolver_proxy.resolve_tags.base import ImageProperties
def quay_repository_url(organization: str, software: str) -> str:
return f'https://quay.io/api/v1/repository/{organization}/{software}'
class QuayIOTagResolver(base.TagResolver):
registry_base_uri = 'quay.io'
async def resolve_single_image(self, image_props: ImageProperties) -> str:
"""Resolve single image digest using Quay API."""
tags = (
await (
await self.client.get(quay_repository_url(image_props.org, image_props.software))
).json()
).get('tags', {})
tag_metadata = tags.get(image_props.tag)
assert tag_metadata, f'Unknown image {image_props.url}'
assert tag_metadata['manifest_digest'], 'Unknown Quay response format'
return f'quay.io/{image_props.org}/{image_props.software}@{tag_metadata["manifest_digest"]}'
def get_client_headers(self):
headers = super().get_client_headers()
if self.token:
headers.update({'Authorization': f'Bearer {self.token}'})
return headers
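# Hedged sketch of the flow above (the ImageProperties constructor lives in
# `base`; the field names below follow the attribute accesses in
# resolve_single_image and are otherwise an assumption): resolving
# "quay.io/acme/app:v1" GETs https://quay.io/api/v1/repository/acme/app,
# looks up tags["v1"], and returns
# "quay.io/acme/app@sha256:<manifest_digest>", pinning the deployment to an
# immutable digest instead of a mutable tag.
#
#   props = ImageProperties(org='acme', software='app', tag='v1',
#                           url='quay.io/acme/app:v1')
#   digest_ref = await resolver.resolve_single_image(props)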
| 33.852941
| 100
| 0.681147
|
d5aa03372c63ae3c9691fe8a86796cb596677d96
| 2,300
|
py
|
Python
|
test/segments_test/hg_test.py
|
Pl4gue/powerline-shell
|
ae33f7e7cf48583c7647a3481967aa0dfdb9909e
|
[
"MIT"
] | null | null | null |
test/segments_test/hg_test.py
|
Pl4gue/powerline-shell
|
ae33f7e7cf48583c7647a3481967aa0dfdb9909e
|
[
"MIT"
] | null | null | null |
test/segments_test/hg_test.py
|
Pl4gue/powerline-shell
|
ae33f7e7cf48583c7647a3481967aa0dfdb9909e
|
[
"MIT"
] | 1
|
2020-04-19T04:53:34.000Z
|
2020-04-19T04:53:34.000Z
|
import unittest
import mock
import tempfile
import shutil
import sh
import powerline_shell.segments.hg as hg
from powerline_shell.utils import RepoStats
from ..testing_utils import dict_side_effect_fn
test_cases = {
"? new-file": RepoStats(new=1),
"M modified-file": RepoStats(changed=1),
"R removed-file": RepoStats(changed=1),
"! missing-file": RepoStats(changed=1),
"A added-file": RepoStats(staged=1),
}
class HgTest(unittest.TestCase):
def setUp(self):
self.powerline = mock.MagicMock()
self.powerline.segment_conf.side_effect = dict_side_effect_fn({
("vcs", "show_symbol"): False,
})
self.dirname = tempfile.mkdtemp()
sh.cd(self.dirname)
sh.hg("init", ".")
self.segment = hg.Segment(self.powerline)
def tearDown(self):
shutil.rmtree(self.dirname)
def _add_and_commit(self, filename):
sh.touch(filename)
sh.hg("add", filename)
sh.hg("commit", "-m", "add file " + filename)
def _checkout_new_branch(self, branch):
sh.hg("branch", branch)
@mock.patch("powerline_shell.segments.hg.get_PATH")
def test_hg_not_installed(self, get_PATH):
get_PATH.return_value = ""  # so hg can't be found
self.segment.start()
self.segment.add_to_powerline()
self.assertEqual(self.powerline.append.call_count, 0)
def test_non_hg_directory(self):
shutil.rmtree(".hg")
self.segment.start()
self.segment.add_to_powerline()
self.assertEqual(self.powerline.append.call_count, 0)
def test_standard(self):
self._add_and_commit("foo")
self.segment.start()
self.segment.add_to_powerline()
self.assertEqual(self.powerline.append.call_args[0][0], " default ")
def test_different_branch(self):
self._add_and_commit("foo")
self._checkout_new_branch("bar")
self.segment.start()
self.segment.add_to_powerline()
self.assertEqual(self.powerline.append.call_args[0][0], " bar ")
@mock.patch('powerline_shell.segments.hg._get_hg_status')
def test_all(self, check_output):
for stdout, result in test_cases.items():
stats = hg.parse_hg_stats([stdout])
self.assertEqual(result, stats)
| 30.263158
| 76
| 0.655652
|
bc5d5f327c98c6dd5e1b187534509a9bb97f8f22
| 168
|
py
|
Python
|
examples/core/datasets/__init__.py
|
zlianghahaha/torchquantum
|
685fa5621c0bf24d47d94e37c82ce7fd3c04ef1d
|
[
"MIT"
] | null | null | null |
examples/core/datasets/__init__.py
|
zlianghahaha/torchquantum
|
685fa5621c0bf24d47d94e37c82ce7fd3c04ef1d
|
[
"MIT"
] | null | null | null |
examples/core/datasets/__init__.py
|
zlianghahaha/torchquantum
|
685fa5621c0bf24d47d94e37c82ce7fd3c04ef1d
|
[
"MIT"
] | null | null | null |
from .mnist import *
from .vowel import *
from .layer_regression import *
from .vqe import *
from .cifar10 import *
from .simple2cls import *
from .simple3cls import *
| 21
| 31
| 0.75
|
799e1b69e4fbc5dc0826eb03b938b3d35dab09f9
| 505
|
py
|
Python
|
tests/test_sentinel.py
|
PaulRenvoise/flashback
|
f9a16f4b0cb12a2180206c7b95d9eb8fb256381d
|
[
"MIT"
] | 3
|
2021-06-08T11:40:59.000Z
|
2022-03-31T16:22:56.000Z
|
tests/test_sentinel.py
|
PaulRenvoise/flashback
|
f9a16f4b0cb12a2180206c7b95d9eb8fb256381d
|
[
"MIT"
] | 28
|
2020-04-28T22:36:14.000Z
|
2021-06-06T20:32:00.000Z
|
tests/test_sentinel.py
|
PaulRenvoise/flashback
|
f9a16f4b0cb12a2180206c7b95d9eb8fb256381d
|
[
"MIT"
] | null | null | null |
# pylint: disable=no-self-use
from flashback import Sentinel
class TestSentinel:
def test_instance_equality(self):
sentinel = Sentinel()
assert sentinel == Sentinel()
def test_instance_identity(self):
sentinel = Sentinel()
assert sentinel is Sentinel()
def test_class_equality(self):
sentinel = Sentinel()
assert sentinel == Sentinel
def test_class_identity(self):
sentinel = Sentinel()
assert sentinel is Sentinel
| 19.423077
| 37
| 0.657426
|
cdf18ccfdbf23f23279f0be7b7c33f771a63001b
| 2,465
|
py
|
Python
|
main/main_edge_ppi.py
|
khangtran2020/contentDP
|
d0d83c387f2d21fce342d0c445eb06ec928c1414
|
[
"MIT"
] | null | null | null |
main/main_edge_ppi.py
|
khangtran2020/contentDP
|
d0d83c387f2d21fce342d0c445eb06ec928c1414
|
[
"MIT"
] | null | null | null |
main/main_edge_ppi.py
|
khangtran2020/contentDP
|
d0d83c387f2d21fce342d0c445eb06ec928c1414
|
[
"MIT"
] | null | null | null |
import os
import warnings
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
os.environ["CUDA_LAUNCH_BLOCKING"] = "1"
warnings.filterwarnings("ignore", category=DeprecationWarning)
warnings.filterwarnings("ignore", category=UserWarning)
warnings.simplefilter(action='ignore', category=FutureWarning)
import dgl
import numpy as np
from tqdm import tqdm
from Utils.DataProcessing import *
from Models.GCN import GCNMultiLabel
from Datasets.PPIDataset import PPIEdgeDataset
from Trainer.Trainer import TrainerPPI
import torch
import sys
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print(device)
num_channel = 128
learning_rate = 0.001
epochs = 20000
patience = 50
num_run = 1
num_feat = 2048
private_edge_rate = sys.argv[1]
epsilon_edge = sys.argv[2]
# eps_feat, eps_edge, mode, p_rate
dataset_train = PPIEdgeDataset(eps_edge=epsilon_edge, mode='train', p_rate=private_edge_rate)
dataset_val = PPIEdgeDataset(eps_edge=epsilon_edge, mode='valid', p_rate=private_edge_rate)
dataset_test = PPIEdgeDataset(eps_edge=epsilon_edge, mode='test', p_rate=private_edge_rate)
num_class = dataset_train.num_labels
save_model_path = '22JAN2022/'
all_result = {}
avg_result = {}
temp_f1 = []
# (self, num_epoch, learning_rate, patience, model, dataset_train, dataset_val, dataset_test, name_model, device, train_mode)
for run in range(num_run):
print("Run {}".format(run + 1))
name_model_to_save = save_model_path + "ppi_p_rate_{}_edgeeps_{}_model_run_{}.pt".format(private_edge_rate, epsilon_edge, run+1)
model = GCNMultiLabel(in_feats=dataset_train[0][0].ndata['feat'].shape[1], h_feats=num_channel, num_classes=num_class)
trainer = TrainerPPI(num_epoch=epochs, learning_rate=learning_rate, patience=patience, model=model, dataset_train=dataset_train,
dataset_val=dataset_val, dataset_test=dataset_test, name_model=name_model_to_save, device=device, train_mode='edge')
f1 = trainer.train_feat_edge()
all_result["ppi_p_rate_{}_edgeeps_{}_model_run_{}".format(private_edge_rate, epsilon_edge, run+1)] = f1
temp_f1.append(f1)
avg_result["ppi_p_rate_{}_edgeeps_{}".format(private_edge_rate, epsilon_edge)] = np.mean(np.array(temp_f1))
print("=============== ALL RESULTS: ===================")
for key in all_result:
print(key, all_result[key])
print("=============== AVG RESULTS: ===================")
for key in avg_result:
print(key, avg_result[key])
| 37.923077
| 132
| 0.752535
|
87358730222eaf6310c6dc341da5b5913bef45ab
| 4,236
|
py
|
Python
|
rprop.py
|
JoonyoungYi/RProp-tensorflow
|
cd52ee789136453267762b1e428984af018890e7
|
[
"MIT"
] | 9
|
2019-03-11T09:23:54.000Z
|
2021-11-25T14:00:34.000Z
|
rprop.py
|
JoonyoungYi/RProp-tensorflow
|
cd52ee789136453267762b1e428984af018890e7
|
[
"MIT"
] | null | null | null |
rprop.py
|
JoonyoungYi/RProp-tensorflow
|
cd52ee789136453267762b1e428984af018890e7
|
[
"MIT"
] | null | null | null |
"""
RProp (Resilient Backpropagation) for TensorFlow.
This code is forked form "https://raw.githubusercontent.com/dirkweissenborn/genie-kb/master/rprop.py".
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.framework import ops
from tensorflow.python.training import optimizer
class RPropOptimizer(optimizer.Optimizer):
"""
Optimizer that implements the RProp algorithm.
"""
def __init__(self,
stepsize=0.1,
etaplus=1.2,
etaminus=0.5,
stepsizemax=50.0,
stepsizemin=1e-6,
use_locking=False,
name="RProp"):
super(RPropOptimizer, self).__init__(use_locking, name)
self._stepsize = stepsize
self._etaplus = etaplus
self._etaminus = etaminus
self._stepsizemax = stepsizemax
self._stepsizemin = stepsizemin
def _create_slots(self, var_list):
'''
:param var_list:
:return:
'''
# Create per-variable slots for the step sizes, the last applied update
# deltas, and the previous gradients used for the sign comparisons.
for v in var_list:
self._get_or_make_slot(
v,
tf.ones([v.get_shape().num_elements()], dtype=tf.float32) *
self._stepsize,
"step",
self._name, )
self._get_or_make_slot(
v,
tf.zeros([v.get_shape().num_elements()], dtype=tf.float32),
"delta",
self._name, )
self._get_or_make_slot(
v,
tf.zeros([v.get_shape().num_elements()], dtype=tf.float32),
"grad",
self._name, )
def _apply_dense(self, grad, var):
grad_slot = self.get_slot(var, "grad")
step_slot = self.get_slot(var, "step")
delta_slot = self.get_slot(var, "delta")
grad = tf.reshape(grad, [-1])
sign = tf.cast(tf.sign(grad_slot * grad), tf.int64)
with tf.control_dependencies([sign]):
grad = grad_slot.assign(grad)
p_indices = tf.where(tf.equal(sign, 1)) # positive indices
m_indices = tf.where(tf.equal(sign, -1)) # minus indices
z_indices = tf.where(tf.equal(sign, 0)) # zero indices
step_p_update = tf.expand_dims(
tf.minimum(
tf.gather_nd(step_slot, p_indices) * self._etaplus,
self._stepsizemax), 1)
step_m_update = tf.expand_dims(
tf.maximum(
tf.gather_nd(step_slot, m_indices) * self._etaminus,
self._stepsizemin), 1)
step_z_update = tf.expand_dims(tf.gather_nd(step_slot, z_indices), 1)
with tf.control_dependencies(
[step_p_update, step_m_update, step_z_update]):
step = tf.scatter_update(step_slot, p_indices, step_p_update)
step = tf.scatter_update(step, m_indices, step_m_update)
step = tf.scatter_update(step, z_indices, step_z_update)
step = step_slot.assign(step)
delta_p_update = tf.expand_dims(
tf.gather_nd(tf.sign(grad) * step, p_indices), 1)
delta_z_update = tf.expand_dims(
tf.gather_nd(tf.sign(grad) * step, z_indices), 1)
with tf.control_dependencies([delta_p_update, delta_z_update]):
delta = tf.scatter_update(delta_slot, p_indices, delta_p_update)
delta = tf.scatter_update(delta, z_indices, delta_z_update)
delta = delta_slot.assign(delta)
with tf.control_dependencies([sign]):
grad = tf.scatter_update(grad, m_indices,
tf.zeros_like(m_indices, tf.float32))
grad = grad_slot.assign(grad)
up = tf.reshape(delta, var.get_shape())
var_update = var.assign_sub(up, use_locking=self._use_locking)
return tf.group(*[var_update, step, delta, grad])
def _apply_sparse(self, grad, var):
raise NotImplementedError("RProp should be used only in batch_mode.")
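# Hedged usage sketch: RProp adapts step sizes from the *sign* of the
# full-batch gradient, so each step should see the whole training set
# (hence the NotImplementedError for sparse updates above). `loss` is
# assumed to be a scalar tensor computed over the entire dataset:
#
#   optimizer = RPropOptimizer(stepsize=0.1)
#   train_op = optimizer.minimize(loss)
#   with tf.Session() as sess:
#       sess.run(tf.global_variables_initializer())
#       sess.run(train_op)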
| 37.486726
| 106
| 0.592304
|
8534c6cacd468b3186aa43814331562690b25f8c
| 4,403
|
py
|
Python
|
taln2016/icsisumm-primary-sys34_v1/eval_taln.py
|
hectormartinez/rougexstem
|
32da9eab253cb88fc1882e59026e8b5b40900a25
|
[
"Apache-2.0"
] | null | null | null |
taln2016/icsisumm-primary-sys34_v1/eval_taln.py
|
hectormartinez/rougexstem
|
32da9eab253cb88fc1882e59026e8b5b40900a25
|
[
"Apache-2.0"
] | null | null | null |
taln2016/icsisumm-primary-sys34_v1/eval_taln.py
|
hectormartinez/rougexstem
|
32da9eab253cb88fc1882e59026e8b5b40900a25
|
[
"Apache-2.0"
] | null | null | null |
import sys, os, os.path, tempfile, re, collections, shutil
rg = 'perl /home/alonso/tool/ROUGE-1.5.5/ROUGE_externalstemmer-1.5.5.pl'
def create_config_duc(model_dir, peer_dir, exper_dir):
models = [item for item in os.listdir(model_dir) if ("sum_" in item and item.endswith("sent.tok")) ]
print(models)
peers = os.listdir(peer_dir)
config_file = exper_dir+"config.xml"
config = open(config_file, 'w')
config.write('<ROUGE_EVAL version=\"1.5.5\">\n')
count=0
for peer in peers:
peer2=peer
if '.' in peer:
peer2=peer[:peer.find('.')]
if '_' in peer2:
peer2=peer2[:peer2.find('_')]
#print peer2
count+=1
config.write('<EVAL ID=\"'+str(count)+'\">\n')
config.write('<PEER-ROOT>\n')
config.write(peer_dir + '\n')
config.write('</PEER-ROOT>\n')
config.write('<MODEL-ROOT>\n')
config.write(model_dir + '\n')
config.write('</MODEL-ROOT>\n')
config.write('<INPUT-FORMAT TYPE=\"SPL\">\n')
config.write('</INPUT-FORMAT>\n')
config.write('<PEERS>\n')
config.write('<P ID=\"1\">%s</P>\n' %peer)
config.write('</PEERS>\n')
config.write('<MODELS>\n')
modelnames=[item for item in models if peer2 in item]
#print modelnames
for model in modelnames:
config.write('<M ID=\"'+model[-6:-5]+'\">'+model+'</M>\n' )
config.write('</MODELS>\n')
config.write('</EVAL>\n')
config.write('</ROUGE_EVAL>\n')
config.close()
return config_file
def create_config(model_dir, peer_dir, exper_dir):
models = [item for item in os.listdir(model_dir) if (item.endswith("sent.tok")) ]
#print models
peers = os.listdir(peer_dir)
config_file = exper_dir+"config.xml"
config = open(config_file, 'w')
config.write('<ROUGE_EVAL version=\"1.5.5\">\n')
count=0
for peer in peers:
peer2=peer
if '.' in peer:
peer2=peer[:peer.find('.')]
if '_' in peer2:
peer2=peer2[:peer2.find('_')]
#print peer2
count+=1
config.write('<EVAL ID=\"'+str(count)+'\">\n')
config.write('<PEER-ROOT>\n')
config.write(peer_dir + '\n')
config.write('</PEER-ROOT>\n')
config.write('<MODEL-ROOT>\n')
config.write(model_dir + '\n')
config.write('</MODEL-ROOT>\n')
config.write('<INPUT-FORMAT TYPE=\"SPL\">\n')
config.write('</INPUT-FORMAT>\n')
config.write('<PEERS>\n')
config.write('<P ID=\"1\">%s</P>\n' %peer)
config.write('</PEERS>\n')
config.write('<MODELS>\n')
modelnames=[item for item in models if peer2 in item]
#print modelnames
for model in modelnames:
config.write('<M ID=\"'+model[-6:-5]+'\">'+model+'</M>\n' )
config.write('</MODELS>\n')
config.write('</EVAL>\n')
config.write('</ROUGE_EVAL>\n')
config.close()
return config_file
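
# For reference, each <EVAL> block emitted by the two builders above has
# roughly this shape (the file names are purely illustrative):
#
# <EVAL ID="1">
# <PEER-ROOT>/path/to/peers</PEER-ROOT>
# <MODEL-ROOT>/path/to/models</MODEL-ROOT>
# <INPUT-FORMAT TYPE="SPL"></INPUT-FORMAT>
# <PEERS><P ID="1">d30001t.txt</P></PEERS>
# <MODELS><M ID="A">sum_d30001t_A.sent.tok</M></MODELS>
# </EVAL>
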
def run_rouge_bytes(config_file, length, outputdir):
print "evaluating...", config_file, outputdir+"score.txt"
rg='perl /home/natschluter/eval_software/ROUGE-1.5.5/ROUGE-1.5.5.pl'
os.system(rg+" -e /home/alonso/tool/ROUGE-1.5.5/data -n 4 -2 4 -u -m -x -c 95 -r 1000 -f A -t 0 -b 665 -a "+config_file+" > "+outputdir+"_score.txt" )
def run_rouge(config_file, length, outputdir):
print "evaluating...", config_file, outputdir+"score.txt"
os.system(rg+" -e /home/alonso/tool/ROUGE-1.5.5/data -n 4 -2 4 -u -m -x -c 95 -r 1000 -f A -t 0 -l "+length+" -a "+config_file+" > "+outputdir+"_score.txt" )
def evaluate_length():
print len(sys.argv)
    if len(sys.argv) != 7:
sys.stderr.write('USAGE: %s <sys_prefix1> <peer_dir2> <length3> <scorefoldername4> <modelfiles5> <dtype6>\n' %sys.argv[0])
sys.exit(1)
modelfiles=sys.argv[5]
if sys.argv[6]=='duc04':
config_file = create_config_duc(modelfiles, sys.argv[2], sys.argv[4]+"/"+sys.argv[1]+"_")
else:
config_file = create_config(modelfiles, sys.argv[2], sys.argv[4]+"/"+sys.argv[1]+"_")
run_rouge(config_file, sys.argv[3], sys.argv[4]+"/"+sys.argv[1]+"_")
def evaluate_bytes():
print len(sys.argv)
    if len(sys.argv) != 7:
sys.stderr.write('USAGE: %s <sys_prefix1> <peer_dir2> <length3> <scorefoldername4> <modelfiles5> <dtype6>\n' %sys.argv[0])
sys.exit(1)
modelfiles=sys.argv[5]
if sys.argv[6]=='duc04':
config_file = create_config_duc(modelfiles, sys.argv[2], sys.argv[4]+"/"+sys.argv[1]+"_")
else:
config_file = create_config(modelfiles, sys.argv[2], sys.argv[4]+"/"+sys.argv[1]+"_")
run_rouge_bytes(config_file, sys.argv[3], sys.argv[4]+"/"+sys.argv[1]+"_")
if __name__ == '__main__':
# evaluate_duc_bytes_Chance()
evaluate_length()
| 34.944444
| 159
| 0.650238
|
785f9f2099dbff3f3c4905a1dbdf1b2cf89e08fe
| 2,535
|
py
|
Python
|
gbpservice/neutron/services/servicechain/plugins/ncp/exceptions.py
|
ashutosh-mishra/my-test
|
51c82af293f291b9182204392e7d21bda27786d1
|
[
"Apache-2.0"
] | null | null | null |
gbpservice/neutron/services/servicechain/plugins/ncp/exceptions.py
|
ashutosh-mishra/my-test
|
51c82af293f291b9182204392e7d21bda27786d1
|
[
"Apache-2.0"
] | null | null | null |
gbpservice/neutron/services/servicechain/plugins/ncp/exceptions.py
|
ashutosh-mishra/my-test
|
51c82af293f291b9182204392e7d21bda27786d1
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Exceptions used by NodeCompositionPlugin and drivers."""
from neutron.common import exceptions
class NodeDriverError(exceptions.NeutronException):
"""Node driver call failed."""
message = _("%(method)s failed.")
class NodeCompositionPluginException(exceptions.NeutronException):
"""Base for node driver exceptions returned to user."""
pass
class PlumbingException(exceptions.NeutronException):
"""Base for node driver exceptions returned to user."""
pass
class NodeCompositionPluginBadRequest(exceptions.BadRequest,
NodeCompositionPluginException):
"""Base for node driver bad request exceptions returned to user."""
pass
class OneSpecPerInstanceAllowed(NodeCompositionPluginBadRequest):
message = _("The Node Composition Plugin only supports one Servicechain"
"Spec per Servicechain Instance.")
class NoDriverAvailableForAction(NodeCompositionPluginBadRequest):
message = _("The Node Composition Plugin can't find any Node Driver "
"available for executing %(action)s on node %(node_id)s. "
"This may be caused by a Servicechain Node misconfiguration "
"or an unsupported Service Profile.")
class ServiceProfileInUseByAnInstance(NodeCompositionPluginBadRequest):
message = _("Cannot update Service Profile %(profile_id)s because it's "
"used by servicechain instance %(instance_id)s.")
class NotAvailablePTGForTargetRequest(PlumbingException):
message = _("PTG of type %(ptg_type)s doesn't exist for service chain "
"instance %(instance)s. However, it is required by the "
"scheduled Node Driver in order to deploy Node %(node)s")
class InuseSpecNodeUpdateNotAllowed(NodeCompositionPluginBadRequest):
message = _("The Node Composition Plugin does not support updating the "
"nodes in an instantiated servicechain spec.")
| 39
| 78
| 0.715582
|
afc44f2cfa648c3668170bab73364079e01636bc
| 7,732
|
py
|
Python
|
app/simulation/Simulation.py
|
shyamTum/CrowdNav
|
93015712f42d6aafc217f3c0362b209134ac5cb1
|
[
"MIT"
] | 15
|
2018-04-27T15:39:57.000Z
|
2021-05-19T20:58:23.000Z
|
app/simulation/Simulation.py
|
shyamTum/CrowdNav
|
93015712f42d6aafc217f3c0362b209134ac5cb1
|
[
"MIT"
] | null | null | null |
app/simulation/Simulation.py
|
shyamTum/CrowdNav
|
93015712f42d6aafc217f3c0362b209134ac5cb1
|
[
"MIT"
] | 15
|
2018-02-03T14:45:48.000Z
|
2020-04-08T07:25:43.000Z
|
import json
import traci
import traci.constants as tc
from app.network.Network import Network
from app.streaming import RTXForword
from colorama import Fore
from app import Config
from app.entitiy.CarRegistry import CarRegistry
from app.logging import info
from app.routing.CustomRouter import CustomRouter
from app.streaming import RTXConnector
import time
# get the current system time
from app.routing.RoutingEdge import RoutingEdge
current_milli_time = lambda: int(round(time.time() * 1000))
class Simulation(object):
""" here we run the simulation in """
# the current tick of the simulation
tick = 0
# last tick time
lastTick = current_milli_time()
@classmethod
def applyFileConfig(cls):
""" reads configs from a json and applies it at realtime to the simulation """
try:
config = json.load(open('./knobs.json'))
CustomRouter.explorationPercentage = config['explorationPercentage']
CustomRouter.averageEdgeDurationFactor = config['averageEdgeDurationFactor']
CustomRouter.maxSpeedAndLengthFactor = config['maxSpeedAndLengthFactor']
CustomRouter.freshnessUpdateFactor = config['freshnessUpdateFactor']
CustomRouter.freshnessCutOffValue = config['freshnessCutOffValue']
CustomRouter.reRouteEveryTicks = config['reRouteEveryTicks']
except:
pass
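
    # A matching knobs.json would look roughly like this (the values below
    # are illustrative, not the project's defaults):
    #
    # {
    #     "explorationPercentage": 0.1,
    #     "averageEdgeDurationFactor": 0.5,
    #     "maxSpeedAndLengthFactor": 0.5,
    #     "freshnessUpdateFactor": 10,
    #     "freshnessCutOffValue": 300,
    #     "reRouteEveryTicks": 20
    # }
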
@classmethod
def start(cls):
""" start the simulation """
info("# Start adding initial cars to the simulation", Fore.MAGENTA)
# apply the configuration from the json file
cls.applyFileConfig()
CarRegistry.applyCarCounter()
cls.loop()
@classmethod
# @profile
def loop(cls):
""" loops the simulation """
# start listening to all cars that arrived at their target
traci.simulation.subscribe((tc.VAR_ARRIVED_VEHICLES_IDS,))
while 1:
# Do one simulation step
cls.tick += 1
traci.simulationStep()
# Log tick duration to kafka
duration = current_milli_time() - cls.lastTick
cls.lastTick = current_milli_time()
msg = dict()
msg["duration"] = duration
RTXForword.publish(msg, Config.kafkaTopicPerformance)
# Check for removed cars and re-add them into the system
            # tc.VAR_ARRIVED_VEHICLES_IDS is the variable subscribed above (value 122)
            for removedCarId in traci.simulation.getSubscriptionResults()[tc.VAR_ARRIVED_VEHICLES_IDS]:
CarRegistry.findById(removedCarId).setArrived(cls.tick)
timeBeforeCarProcess = current_milli_time()
# let the cars process this step
CarRegistry.processTick(cls.tick)
# log time it takes for routing
msg = dict()
msg["duration"] = current_milli_time() - timeBeforeCarProcess
RTXForword.publish(msg, Config.kafkaTopicRouting)
# if we enable this we get debug information in the sumo-gui using global traveltime
# should not be used for normal running, just for debugging
# if (cls.tick % 10) == 0:
# for e in Network.routingEdges:
# 1) traci.edge.adaptTraveltime(e.id, 100*e.averageDuration/e.predictedDuration)
# traci.edge.adaptTraveltime(e.id, e.averageDuration)
# 3) traci.edge.adaptTraveltime(e.id, (cls.tick-e.lastDurationUpdateTick)) # how old the data is
# real time update of config if we are not in kafka mode
if (cls.tick % 10) == 0:
if Config.kafkaUpdates is False and Config.mqttUpdates is False:
# json mode
cls.applyFileConfig()
else:
# kafka mode
newConf = RTXConnector.checkForNewConfiguration()
if newConf is not None:
if "exploration_percentage" in newConf:
CustomRouter.explorationPercentage = newConf["exploration_percentage"]
print("setting victimsPercentage: " + str(newConf["exploration_percentage"]))
if "route_random_sigma" in newConf:
CustomRouter.routeRandomSigma = newConf["route_random_sigma"]
print("setting routeRandomSigma: " + str(newConf["route_random_sigma"]))
if "max_speed_and_length_factor" in newConf:
CustomRouter.maxSpeedAndLengthFactor = newConf["max_speed_and_length_factor"]
print("setting maxSpeedAndLengthFactor: " + str(newConf["max_speed_and_length_factor"]))
if "average_edge_duration_factor" in newConf:
CustomRouter.averageEdgeDurationFactor = newConf["average_edge_duration_factor"]
print("setting averageEdgeDurationFactor: " + str(newConf["average_edge_duration_factor"]))
if "freshness_update_factor" in newConf:
CustomRouter.freshnessUpdateFactor = newConf["freshness_update_factor"]
print("setting freshnessUpdateFactor: " + str(newConf["freshness_update_factor"]))
if "freshness_cut_off_value" in newConf:
CustomRouter.freshnessCutOffValue = newConf["freshness_cut_off_value"]
print("setting freshnessCutOffValue: " + str(newConf["freshness_cut_off_value"]))
if "re_route_every_ticks" in newConf:
CustomRouter.reRouteEveryTicks = newConf["re_route_every_ticks"]
print("setting reRouteEveryTicks: " + str(newConf["re_route_every_ticks"]))
if "total_car_counter" in newConf:
CarRegistry.totalCarCounter = newConf["total_car_counter"]
CarRegistry.applyCarCounter()
print("setting totalCarCounter: " + str(newConf["total_car_counter"]))
if "edge_average_influence" in newConf:
RoutingEdge.edgeAverageInfluence = newConf["edge_average_influence"]
print("setting edgeAverageInfluence: " + str(newConf["edge_average_influence"]))
# print status update if we are not running in parallel mode
if (cls.tick % 100) == 0 and Config.parallelMode is False:
print(str(Config.processID) + " -> Step:" + str(cls.tick) + " # Driving cars: " + str(
traci.vehicle.getIDCount()) + "/" + str(
CarRegistry.totalCarCounter) + " # avgTripDuration: " + str(
CarRegistry.totalTripAverage) + "(" + str(
CarRegistry.totalTrips) + ")" + " # avgTripOverhead: " + str(
CarRegistry.totalTripOverheadAverage))
            # @deprecated -> will be removed
            # # if we are in parallel mode we end the simulation after 10000 ticks with a result output
# if (cls.tick % 10000) == 0 and Config.parallelMode:
# # end the simulation here
# print(str(Config.processID) + " -> Step:" + str(cls.tick) + " # Driving cars: " + str(
# traci.vehicle.getIDCount()) + "/" + str(
# CarRegistry.totalCarCounter) + " # avgTripDuration: " + str(
# CarRegistry.totalTripAverage) + "(" + str(
# CarRegistry.totalTrips) + ")" + " # avgTripOverhead: " + str(
# CarRegistry.totalTripOverheadAverage))
# return
| 51.205298
| 119
| 0.595318
|
76da22533e0353c71a98b4d2ddfabc69bd8fdef1
| 761
|
py
|
Python
|
credmaker/openstackrc_maker.py
|
OneOfaKindGeek/mycode
|
bbb4391b333aaa1667314b76393f2102c05a2571
|
[
"Apache-2.0"
] | null | null | null |
credmaker/openstackrc_maker.py
|
OneOfaKindGeek/mycode
|
bbb4391b333aaa1667314b76393f2102c05a2571
|
[
"Apache-2.0"
] | null | null | null |
credmaker/openstackrc_maker.py
|
OneOfaKindGeek/mycode
|
bbb4391b333aaa1667314b76393f2102c05a2571
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
outFile = open("admin.rc", "a")
osAUTH = input("What is the OS_AUTH_URL?")
print("export OS_AUTH_URL=" + osAUTH, file=outFile)
print("export OS_IDENTITY_API_VERSION=3", file=outFile)
osPROJ = input("What is the OS_PROJECT_NAME?")
print("export OS_PROJECT_NAME=" + osPROJ, file=outFile)
osPROJDOM = input("What is the OS_PROJECT_DOMAIN_NAME?")
print("export OS_PROJECT_DOMAIN_NAME=" + osPROJDOM, file=outFile)
osUSER = input("What is the OS_USERNAME?")
print("export OS_USERNAME=" + osUSER, file=outFile)
osUSERDOM = input("What is the OS_USER_DOMAIN_NAME?")
print("export OS_USER_DOMAIN_NAME=" + osUSERDOM, file=outFile)
osPASS = input("What is the OS_PASSWORD?")
print("export OS_PASSWORD=" + osPASS, file=outFile)
outFile.close()
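
# The resulting admin.rc is meant to be sourced before using the OpenStack
# CLI; with illustrative answers it would contain lines such as:
#
# export OS_AUTH_URL=http://controller:5000/v3
# export OS_IDENTITY_API_VERSION=3
# export OS_PROJECT_NAME=admin
# export OS_PROJECT_DOMAIN_NAME=Default
# export OS_USERNAME=admin
# export OS_USER_DOMAIN_NAME=Default
# export OS_PASSWORD=secret
#
# Note that the script opens admin.rc in append mode ("a"), so repeated
# runs add duplicate blocks instead of overwriting the previous one.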
| 33.086957
| 65
| 0.750329
|
08ff141a55bed62f1eca5b7da6a3592616e2f057
| 821
|
py
|
Python
|
backend/benchmark/migrations/0002_systeminfo.py
|
cccgnit/CryptoBenchmark
|
a7690b2b4a3d994b1feb900457ea67a0134bcb5e
|
[
"MIT"
] | null | null | null |
backend/benchmark/migrations/0002_systeminfo.py
|
cccgnit/CryptoBenchmark
|
a7690b2b4a3d994b1feb900457ea67a0134bcb5e
|
[
"MIT"
] | null | null | null |
backend/benchmark/migrations/0002_systeminfo.py
|
cccgnit/CryptoBenchmark
|
a7690b2b4a3d994b1feb900457ea67a0134bcb5e
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.3 on 2020-05-14 12:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('benchmark', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='SystemInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('startTime', models.IntegerField(blank=True, null=True)),
('cpuUtilPct', models.FloatField(blank=True, null=True)),
('memUsedPct', models.FloatField(blank=True, null=True)),
('getTotal', models.IntegerField(blank=True, null=True)),
('sentTotal', models.IntegerField(blank=True, null=True)),
],
),
]
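
# The model implied by this migration would look roughly like the sketch
# below (field types are inferred from the CreateModel operation above;
# the real class lives in the app's models.py and may differ):
#
# class SystemInfo(models.Model):
#     startTime = models.IntegerField(blank=True, null=True)
#     cpuUtilPct = models.FloatField(blank=True, null=True)
#     memUsedPct = models.FloatField(blank=True, null=True)
#     getTotal = models.IntegerField(blank=True, null=True)
#     sentTotal = models.IntegerField(blank=True, null=True)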
| 32.84
| 114
| 0.582217
|
fe847da75577d9988d1709fe12413d544d3e461a
| 3,816
|
py
|
Python
|
monitor/monitor/middlewares.py
|
reynierg/websites-monitor
|
afa67d65f4a3dcef11ef86b068e885689970cdd1
|
[
"MIT"
] | null | null | null |
monitor/monitor/middlewares.py
|
reynierg/websites-monitor
|
afa67d65f4a3dcef11ef86b068e885689970cdd1
|
[
"MIT"
] | null | null | null |
monitor/monitor/middlewares.py
|
reynierg/websites-monitor
|
afa67d65f4a3dcef11ef86b068e885689970cdd1
|
[
"MIT"
] | null | null | null |
"""Define here the models for your spider middleware
See documentation in:
https://docs.scrapy.org/en/latest/topics/spider-middleware.html
"""
# from scrapy import signals
# useful for handling different item types with a single interface
# from itemadapter import is_item, ItemAdapter
# class MonitorSpiderMiddleware:
# # Not all methods need to be defined. If a method is not defined,
# # scrapy acts as if the spider middleware does not modify the
# # passed objects.
#
# @classmethod
# def from_crawler(cls, crawler):
# # This method is used by Scrapy to create your spiders.
# s = cls()
# crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
# return s
#
# def process_spider_input(self, response, spider):
# # Called for each response that goes through the spider
# # middleware and into the spider.
#
# # Should return None or raise an exception.
# return None
#
# def process_spider_output(self, response, result, spider):
# # Called with the results returned from the Spider, after
# # it has processed the response.
#
# # Must return an iterable of Request, or item objects.
# for i in result:
# yield i
#
# def process_spider_exception(self, response, exception, spider):
# # Called when a spider or process_spider_input() method
# # (from other spider middleware) raises an exception.
#
# # Should return either None or an iterable of Request or item objects.
# pass
#
# def process_start_requests(self, start_requests, spider):
# # Called with the start requests of the spider, and works
# # similarly to the process_spider_output() method, except
# # that it doesn’t have a response associated.
#
# # Must return only requests (not items).
# for r in start_requests:
# yield r
#
# def spider_opened(self, spider):
# spider.logger.info('Spider opened: %s' % spider.name)
#
#
# class MonitorDownloaderMiddleware:
# # Not all methods need to be defined. If a method is not defined,
# # scrapy acts as if the downloader middleware does not modify the
# # passed objects.
#
# @classmethod
# def from_crawler(cls, crawler):
# # This method is used by Scrapy to create your spiders.
# s = cls()
# crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
# return s
#
# def process_request(self, request, spider):
# # Called for each request that goes through the downloader
# # middleware.
#
# # Must either:
# # - return None: continue processing this request
# # - or return a Response object
# # - or return a Request object
# # - or raise IgnoreRequest: process_exception() methods of
# # installed downloader middleware will be called
# return None
#
# def process_response(self, request, response, spider):
# # Called with the response returned from the downloader.
#
# # Must either;
# # - return a Response object
# # - return a Request object
# # - or raise IgnoreRequest
# return response
#
# def process_exception(self, request, exception, spider):
# # Called when a download handler or a process_request()
# # (from other downloader middleware) raises an exception.
#
# # Must either:
# # - return None: continue processing this exception
# # - return a Response object: stops process_exception() chain
# # - return a Request object: stops process_exception() chain
# pass
#
# def spider_opened(self, spider):
# spider.logger.info('Spider opened: %s' % spider.name)
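
# To activate either middleware, uncomment it above and register it in the
# project's settings.py, e.g. (the priority numbers are illustrative):
#
# SPIDER_MIDDLEWARES = {
#     'monitor.middlewares.MonitorSpiderMiddleware': 543,
# }
# DOWNLOADER_MIDDLEWARES = {
#     'monitor.middlewares.MonitorDownloaderMiddleware': 543,
# }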
| 36.342857
| 80
| 0.644654
|
1e4e8b51a5abd3265ea38002cbfe0eb846b1f693
| 9,183
|
py
|
Python
|
dev-tools/scripts/checkJavadocLinks.py
|
orenovadia/lucene-solr
|
e8276e09a197c3458fc49b8a4442fce1c7eea792
|
[
"Apache-2.0"
] | 1
|
2021-02-10T18:42:17.000Z
|
2021-02-10T18:42:17.000Z
|
dev-tools/scripts/checkJavadocLinks.py
|
orenovadia/lucene-solr
|
e8276e09a197c3458fc49b8a4442fce1c7eea792
|
[
"Apache-2.0"
] | 2
|
2020-08-05T04:53:15.000Z
|
2020-08-26T08:27:11.000Z
|
dev-tools/scripts/checkJavadocLinks.py
|
orenovadia/lucene-solr
|
e8276e09a197c3458fc49b8a4442fce1c7eea792
|
[
"Apache-2.0"
] | 2
|
2021-02-16T21:31:36.000Z
|
2021-07-22T13:40:45.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import traceback
import os
import sys
import re
from html.parser import HTMLParser
import urllib.parse as urlparse
reHyperlink = re.compile(r'<a(\s+.*?)>', re.I)
reAtt = re.compile(r"""(?:\s+([a-z]+)\s*=\s*("[^"]*"|'[^']?'|[^'"\s]+))+""", re.I)
# Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF] /* any Unicode character, excluding the surrogate blocks, FFFE, and FFFF. */
reValidChar = re.compile("^[^\u0000-\u0008\u000B-\u000C\u000E-\u001F\uFFFE\uFFFF]*$")
# silly emacs: '
class FindHyperlinks(HTMLParser):
def __init__(self, baseURL):
HTMLParser.__init__(self)
self.stack = []
self.anchors = set()
self.links = []
self.baseURL = baseURL
self.printed = False
def handle_starttag(self, tag, attrs):
# NOTE: I don't think 'a' should be in here. But try debugging
# NumericRangeQuery.html. (Could be javadocs bug, it's a generic type...)
if tag not in ('link', 'meta', 'frame', 'br', 'hr', 'p', 'li', 'img', 'col', 'a', 'dt', 'dd'):
self.stack.append(tag)
if tag == 'a':
id = None
name = None
href = None
for attName, attValue in attrs:
if attName == 'name':
name = attValue
elif attName == 'href':
href = attValue
elif attName == 'id':
id = attValue
if name is not None:
assert href is None
if name in self.anchors:
if name in ('serializedForm',
'serialized_methods',
'readObject(java.io.ObjectInputStream)',
'writeObject(java.io.ObjectOutputStream)') \
and self.baseURL.endswith('/serialized-form.html'):
# Seems like a bug in Javadoc generation... you can't have
# same anchor name more than once...
pass
else:
self.printFile()
raise RuntimeError('anchor "%s" appears more than once' % name)
else:
self.anchors.add(name)
elif href is not None:
assert name is None
href = href.strip()
self.links.append(urlparse.urljoin(self.baseURL, href))
elif id is None:
raise RuntimeError('couldn\'t find an href nor name in link in %s: only got these attrs: %s' % (self.baseURL, attrs))
def handle_endtag(self, tag):
if tag in ('link', 'meta', 'frame', 'br', 'hr', 'p', 'li', 'img', 'col', 'a', 'dt', 'dd'):
return
if len(self.stack) == 0:
      raise RuntimeError('%s %s:%s: saw </%s> with no opening tag' % (self.baseURL, self.getpos()[0], self.getpos()[1], tag))
if self.stack[-1] == tag:
self.stack.pop()
else:
raise RuntimeError('%s %s:%s: saw </%s> but expected </%s>' % (self.baseURL, self.getpos()[0], self.getpos()[1], tag, self.stack[-1]))
def printFile(self):
if not self.printed:
print()
print(' ' + self.baseURL)
self.printed = True
def parse(baseURL, html):
global failures
# look for broken unicode
if not reValidChar.match(html):
print(' WARNING: invalid characters detected in: %s' % baseURL)
failures = True
return [], []
parser = FindHyperlinks(baseURL)
try:
parser.feed(html)
parser.close()
except:
# TODO: Python's html.parser is now always lenient, which is no good for us: we want correct HTML in our javadocs
parser.printFile()
print(' WARNING: failed to parse %s:' % baseURL)
traceback.print_exc(file=sys.stdout)
failures = True
return [], []
#print ' %d links, %d anchors' % \
# (len(parser.links), len(parser.anchors))
return parser.links, parser.anchors
failures = False
def checkAll(dirName):
"""
Checks *.html (recursively) under this directory.
"""
global failures
# Find/parse all HTML files first
print()
print('Crawl/parse...')
allFiles = {}
if os.path.isfile(dirName):
root, fileName = os.path.split(dirName)
iter = ((root, [], [fileName]),)
else:
iter = os.walk(dirName)
for root, dirs, files in iter:
for f in files:
main, ext = os.path.splitext(f)
ext = ext.lower()
# maybe?:
# and main not in ('serialized-form'):
if ext in ('.htm', '.html') and \
not f.startswith('.#') and \
main not in ('deprecated-list',):
        # Somehow even w/ java 7 generated javadocs,
        # deprecated-list.html can fail to escape generic types
fullPath = os.path.join(root, f).replace(os.path.sep,'/')
fullPath = 'file:%s' % urlparse.quote(fullPath)
# parse and unparse the URL to "normalize" it
fullPath = urlparse.urlunparse(urlparse.urlparse(fullPath))
#print ' %s' % fullPath
allFiles[fullPath] = parse(fullPath, open('%s/%s' % (root, f), encoding='UTF-8').read())
# ... then verify:
print()
print('Verify...')
for fullPath, (links, anchors) in allFiles.items():
#print fullPath
printed = False
for link in links:
origLink = link
# TODO: use urlparse?
idx = link.find('#')
if idx != -1:
anchor = link[idx+1:]
link = link[:idx]
else:
anchor = None
# remove any whitespace from the middle of the link
link = ''.join(link.split())
idx = link.find('?')
if idx != -1:
link = link[:idx]
# TODO: normalize path sep for windows...
if link.startswith('http://') or link.startswith('https://'):
# don't check external links
if link.find('lucene.apache.org/java/docs/mailinglists.html') != -1:
# OK
pass
elif link == 'http://lucene.apache.org/core/':
# OK
pass
elif link == 'http://lucene.apache.org/solr/':
# OK
pass
elif link == 'http://lucene.apache.org/solr/resources.html':
# OK
pass
elif link.find('lucene.apache.org/java/docs/discussion.html') != -1:
# OK
pass
elif link.find('lucene.apache.org/core/discussion.html') != -1:
# OK
pass
elif link.find('lucene.apache.org/solr/mirrors-solr-latest-redir.html') != -1:
# OK
pass
elif link.find('lucene.apache.org/solr/guide/') != -1:
# OK
pass
elif link.find('lucene.apache.org/solr/downloads.html') != -1:
# OK
pass
elif (link.find('svn.apache.org') != -1
or link.find('lucene.apache.org') != -1)\
and os.path.basename(fullPath) != 'Changes.html':
if not printed:
printed = True
print()
print(fullPath)
print(' BAD EXTERNAL LINK: %s' % link)
elif link.startswith('mailto:'):
if link.find('@lucene.apache.org') == -1 and link.find('@apache.org') != -1:
if not printed:
printed = True
print()
print(fullPath)
print(' BROKEN MAILTO (?): %s' % link)
elif link.startswith('javascript:'):
# ok...?
pass
elif 'org/apache/solr/client/solrj/beans/Field.html' in link:
# see LUCENE-4011: this is a javadocs bug for constants
# on annotations it seems?
pass
elif link.startswith('file:'):
if link not in allFiles:
filepath = urlparse.unquote(urlparse.urlparse(link).path)
if not (os.path.exists(filepath) or os.path.exists(filepath[1:])):
if not printed:
printed = True
print()
print(fullPath)
print(' BROKEN LINK: %s' % link)
elif anchor is not None and anchor not in allFiles[link][1]:
if not printed:
printed = True
print()
print(fullPath)
print(' BROKEN ANCHOR: %s' % origLink)
else:
if not printed:
printed = True
print()
print(fullPath)
print(' BROKEN URL SCHEME: %s' % origLink)
failures = failures or printed
return failures
if __name__ == '__main__':
if checkAll(sys.argv[1]):
print()
print('Broken javadocs links were found! Common root causes:')
# please feel free to add to this list
print('* A typo of some sort for manually created links.')
print('* Public methods referencing non-public classes in their signature.')
sys.exit(1)
sys.exit(0)
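
# Typical invocation (the path is illustrative): point the script at a
# directory of generated javadocs; it prints offending files and exits
# non-zero when broken links, anchors, or URL schemes are found:
#
#   python3 checkJavadocLinks.py lucene/build/docs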
| 33.392727
| 158
| 0.579985
|
d4edce09f413417526b0deab21b20da1e2fa97ef
| 16,722
|
py
|
Python
|
richcat/modules/consts/_ext2alias_dic_generator.py
|
richcat-dev/richcat
|
3bb3aba63ca95b7964907d5a4c52312344bd262d
|
[
"MIT"
] | 3
|
2021-12-14T11:01:44.000Z
|
2022-03-30T07:02:29.000Z
|
richcat/modules/consts/_ext2alias_dic_generator.py
|
richcat-dev/richcat
|
3bb3aba63ca95b7964907d5a4c52312344bd262d
|
[
"MIT"
] | 110
|
2020-11-22T15:08:05.000Z
|
2022-03-29T10:59:39.000Z
|
richcat/modules/consts/_ext2alias_dic_generator.py
|
richcat-dev/richcat
|
3bb3aba63ca95b7964907d5a4c52312344bd262d
|
[
"MIT"
] | 1
|
2020-12-20T14:27:32.000Z
|
2020-12-20T14:27:32.000Z
|
"""
richcat.modules._ext2alias_dic_generator
----------------------------------------
Dictionaries for converting file extensions and names to lexer aliases.
This file regenerates itself: every time you change something in these dictionaries, run this script to update the file.
"""
import os
import pickle
import subprocess
from pygments.lexers import get_all_lexers
DIC_LEXER_WC = {
'abap': 'abap',
'ABAP': 'abap',
'apl': 'apl',
'abnf': 'abnf',
'as': 'as',
'adb': 'ada',
'ads': 'ada',
'ada': 'ada',
'adl': 'adl',
'adls': 'adl',
'adlf': 'adl',
'adlx': 'adl',
'agda': 'agda',
'aheui': 'aheui',
'als': 'alloy',
'at': 'at',
'run': 'ampl',
'ng2': 'html+ng2',
'G': 'antlr-ruby',
'g': 'gap',
'applescript': 'applescript',
'ino': 'arduino',
'aj': 'aspectj',
'asy': 'asy',
'aug': 'augeas',
'au3': 'autoit',
'ahk': 'ahk',
'ahkl': 'ahk',
'awk': 'awk',
'bbc': 'bbcbasic',
'bc': 'bc',
'bst': 'bst',
'sh': 'bash',
'ksh': 'bash',
'bash': 'bash',
'ebuild': 'bash',
'eclass': 'bash',
'exheres-0': 'bash',
'exlib': 'bash',
'zsh': 'bash',
'sh-session': 'console',
'shell-session': 'console',
'bat': 'bat',
'cmd': 'bat',
'befunge': 'befunge',
'bib': 'bib',
'bb': 'blitzbasic',
'decls': 'blitzbasic',
'bmx': 'blitzmax',
'bnf': 'bnf',
'boa': 'boa',
'boo': 'boo',
'bpl': 'boogie',
'bf': 'brainfuck',
'b': 'limbo',
'bug': 'jags',
'camkes': 'camkes',
'idl4': 'camkes',
'c': 'c',
'h': 'objective-c',
'idc': 'c',
'cmake': 'cmake',
'c-objdump': 'c-objdump',
'cpsa': 'cpsa',
'aspx': 'aspx-vb',
'asax': 'aspx-vb',
'ascx': 'aspx-vb',
'ashx': 'aspx-vb',
'asmx': 'aspx-vb',
'axd': 'aspx-vb',
'cs': 'csharp',
's': 'gas',
'cadl': 'cadl',
'cdl': 'capdl',
'capnp': 'capnp',
'bas': 'vb.net',
'ceylon': 'ceylon',
'cf': 'cfengine3',
'chai': 'chai',
'chpl': 'chapel',
'ci': 'charmci',
'tmpl': 'cheetah',
'spt': 'cheetah',
'cirru': 'cirru',
'clay': 'clay',
'icl': 'clean',
'dcl': 'clean',
'clj': 'clojure',
'cljs': 'clojurescript',
'cbl': 'cobolfree',
'CBL': 'cobolfree',
'cob': 'cobol',
'COB': 'cobol',
'cpy': 'cobol',
'CPY': 'cobol',
'coffee': 'coffee-script',
'cfc': 'cfc',
'cfm': 'cfm',
'cfml': 'cfm',
'cl': 'common-lisp',
'lisp': 'common-lisp',
'cp': 'cpp',
'cps': 'componentpascal',
'v': 'verilog',
'cpp': 'cpp',
'hpp': 'cpp',
'c++': 'cpp',
'h++': 'cpp',
'cc': 'cpp',
'hh': 'objective-c++',
'cxx': 'cpp',
'hxx': 'cpp',
'C': 'cpp',
'H': 'cpp',
'CPP': 'cpp',
'cpp-objdump': 'cpp-objdump',
'c++-objdump': 'cpp-objdump',
'cxx-objdump': 'cpp-objdump',
'crmsh': 'crmsh',
'pcmk': 'crmsh',
'croc': 'croc',
'cry': 'cryptol',
'cr': 'cr',
'csd': 'csound-document',
'orc': 'csound',
'udo': 'csound',
'sco': 'csound-score',
'css': 'css',
'cu': 'cuda',
'cuh': 'cuda',
'cyp': 'cypher',
'cypher': 'cypher',
'pyx': 'cython',
'pxd': 'cython',
'pxi': 'cython',
'd': 'd',
'di': 'd',
'd-objdump': 'd-objdump',
'dpatch': 'dpatch',
'darcspatch': 'dpatch',
'dart': 'dart',
'dasm16': 'dasm16',
'dasm': 'dasm16',
'pas': 'delphi',
'dpr': 'delphi',
'dg': 'dg',
'diff': 'diff',
'patch': 'diff',
'docker': 'docker',
'dtd': 'dtd',
'duel': 'duel',
'jbst': 'duel',
'dylan-console': 'dylan-console',
'dylan': 'dylan',
'dyl': 'dylan',
'intr': 'dylan',
'lid': 'dylan-lid',
'hdp': 'dylan-lid',
'ecl': 'prolog',
'ec': 'ec',
'eh': 'ec',
'eg': 'earl-grey',
'ezt': 'easytrieve',
'mac': 'easytrieve',
'ebnf': 'ebnf',
'e': 'eiffel',
'ex': 'elixir',
'eex': 'elixir',
'exs': 'elixir',
'elm': 'elm',
'el': 'emacs',
'eml': 'email',
'erl': 'erlang',
'hrl': 'erlang',
'es': 'erlang',
'escript': 'erlang',
'erl-sh': 'erl',
'html': 'html',
'evoque': 'evoque',
'xml': 'xml',
'n': 'nemerle',
'fs': 'forth',
'fsi': 'fsharp',
'factor': 'factor',
'fy': 'fancy',
'fancypack': 'fancy',
'fan': 'fan',
'flx': 'felix',
'flxh': 'felix',
'fnl': 'fennel',
'fish': 'fish',
'load': 'fish',
'flo': 'floscript',
'frt': 'forth',
'f': 'fortranfixed',
'F': 'fortranfixed',
'f03': 'fortran',
'f90': 'fortran',
'F03': 'fortran',
'F90': 'fortran',
'PRG': 'foxpro',
'prg': 'foxpro',
'edp': 'freefem',
'gd': 'gap',
'gi': 'gap',
'gap': 'gap',
'vert': 'glsl',
'frag': 'glsl',
'geo': 'glsl',
'S': 'splus',
'kid': 'genshi',
'pot': 'pot',
'po': 'pot',
'feature': 'cucumber',
'plot': 'gnuplot',
'plt': 'gnuplot',
'go': 'go',
'golo': 'golo',
'gdc': 'gooddata-cl',
'gs': 'gosu',
'gsx': 'gosu',
'gsp': 'gosu',
'vark': 'gosu',
'gst': 'gst',
'[1234567]': 'groff',
'man': 'groff',
'groovy': 'groovy',
'gradle': 'groovy',
'hlsl': 'hlsl',
'hlsli': 'hlsl',
'haml': 'haml',
'handlebars': 'html+handlebars',
'hbs': 'html+handlebars',
'hs': 'haskell',
'hx': 'hx',
'hxsl': 'hx',
'hsail': 'hsail',
'htm': 'html',
'xhtml': 'html',
'xslt': 'xslt',
'phtml': 'html+php',
'hxml': 'haxeml',
'hy': 'hybris',
'hyb': 'hybris',
'pro': 'prolog',
'icon': 'icon',
'ICON': 'icon',
'idr': 'idris',
'ipf': 'igor',
'inf': 'ini',
'i6t': 'i6t',
'ni': 'inform7',
'i7x': 'inform7',
'ini': 'ini',
'cfg': 'ini',
'io': 'io',
'ik': 'ioke',
'weechatlog': 'irc',
'thy': 'isabelle',
'ijs': 'j',
'jag': 'jags',
'j': 'objective-j',
'java': 'java',
'js': 'js',
'jsm': 'js',
'mjs': 'js',
'jcl': 'jcl',
'jsgf': 'jsgf',
'jsonld': 'jsonld',
'json': 'json',
'jsp': 'jsp',
'jl': 'julia',
'juttle': 'juttle',
'kal': 'kal',
'kmsg': 'kmsg',
'dmesg': 'kmsg',
'kk': 'koka',
'kki': 'koka',
'kt': 'kotlin',
'lsl': 'lsl',
'lasso': 'lasso',
'lasso[89]': 'lasso',
'lean': 'lean',
'less': 'less',
'liquid': 'liquid',
'lagda': 'lagda',
'lcry': 'lcry',
'lhs': 'lhs',
'lidr': 'lidr',
'ls': 'live-script',
'll': 'llvm',
'mir': 'llvm-mir',
'x': 'logos',
'xi': 'logos',
'xm': 'logos',
'xmi': 'logos',
'lgt': 'logtalk',
'logtalk': 'logtalk',
'lua': 'lua',
'wlua': 'lua',
'moo': 'moocode',
'mak': 'make',
'mk': 'make',
'mao': 'mako',
'maql': 'maql',
'md': 'md',
'mask': 'mask',
'm': 'octave',
'mhtml': 'mason',
'mc': 'mason',
'mi': 'mason',
'nb': 'mathematica',
'cdf': 'mathematica',
'nbp': 'mathematica',
'ma': 'mathematica',
'ms': 'ms',
'mo': 'modelica',
'def': 'modula2',
'mod': 'modula2',
'monkey': 'monkey',
'mt': 'monte',
'moon': 'moon',
'mos': 'mosel',
'css.in': 'css+mozpreproc',
'js.in': 'javascript+mozpreproc',
'xul.in': 'xul+mozpreproc',
'mq4': 'mql',
'mq5': 'mql',
'mqh': 'mql',
'msc': 'mscgen',
'mu': 'mupad',
'mxml': 'mxml',
'myt': 'myghty',
'ncl': 'ncl',
'nsi': 'nsis',
'nsh': 'nsis',
'asm': 'tasm',
'ASM': 'tasm',
'objdump-intel': 'objdump-nasm',
'nc': 'nesc',
'lsp': 'newlisp',
'nl': 'newlisp',
'kif': 'newlisp',
'ns2': 'newspeak',
'nim': 'nim',
'nimrod': 'nim',
'nit': 'nit',
'nix': 'nixos',
'smv': 'nusmv',
'objdump': 'objdump',
'mm': 'objective-c++',
'ml': 'ocaml',
'mli': 'ocaml',
'mll': 'ocaml',
'mly': 'ocaml',
'odin': 'odin',
'ooc': 'ooc',
'opa': 'opa',
'p': 'pawn',
'cls': 'openedge',
'pan': 'pan',
'psi': 'parasail',
'psl': 'parasail',
'pwn': 'pawn',
'inc': 'pov',
'peg': 'peg',
'pl': 'prolog',
'pm': 'perl',
'nqp': 'perl6',
'p6': 'perl6',
'6pl': 'perl6',
'p6l': 'perl6',
'pl6': 'perl6',
'6pm': 'perl6',
'p6m': 'perl6',
'pm6': 'perl6',
't': 'tads3',
'raku': 'perl6',
'rakumod': 'perl6',
'rakutest': 'perl6',
'rakudoc': 'perl6',
'php': 'php',
'php[345]': 'php',
'pig': 'pig',
'pike': 'pike',
'pmod': 'pike',
'pc': 'pkgconfig',
'pony': 'pony',
'ps': 'postscript',
'eps': 'postscript',
'pov': 'pov',
'ps1': 'powershell',
'psm1': 'powershell',
'praat': 'praat',
'proc': 'praat',
'psc': 'praat',
'prolog': 'prolog',
'properties': 'properties',
'proto': 'protobuf',
'pug': 'pug',
'jade': 'pug',
'pp': 'puppet',
'pypylog': 'pypylog',
'py2tb': 'py2tb',
'py': 'python',
'pyw': 'python',
'jy': 'python',
'sage': 'python',
'sc': 'sc',
'bzl': 'python',
'tac': 'python',
'pytb': 'pytb',
'py3tb': 'pytb',
'BAS': 'qbasic',
'qvto': 'qvto',
'qml': 'qml',
'qbs': 'qml',
'Rout': 'rconsole',
'rnc': 'rnc',
'spec': 'spec',
'rkt': 'racket',
'rktd': 'racket',
'rktl': 'racket',
'rl': 'ragel-ruby',
'Rd': 'rd',
're': 'reason',
'rei': 'reason',
'r': 'rebol',
'r3': 'rebol',
'reb': 'rebol',
'red': 'red',
'reds': 'red',
'cw': 'redcode',
'reg': 'registry',
'rexx': 'rexx',
'rex': 'rexx',
'rx': 'rexx',
'arexx': 'rexx',
'rhtml': 'rhtml',
'ride': 'ride',
'graph': 'roboconf-graph',
'instances': 'roboconf-instances',
'robot': 'robotframework',
'rql': 'rql',
'rsl': 'rsl',
'rst': 'rst',
'rest': 'rst',
'rts': 'rts',
'rb': 'rb',
'rbw': 'rb',
'rake': 'rb',
'gemspec': 'rb',
'rbx': 'rb',
'duby': 'rb',
'rs': 'rust',
'rs.in': 'rust',
'SAS': 'sas',
'sas': 'sas',
'R': 'splus',
'sml': 'sml',
'sig': 'sml',
'fun': 'sml',
'sarl': 'sarl',
'sass': 'sass',
'scala': 'scala',
'scaml': 'scaml',
'scd': 'sc',
'scdoc': 'scdoc',
'scm': 'scheme',
'ss': 'scheme',
'sci': 'scilab',
'sce': 'scilab',
'tst': 'scilab',
'scss': 'scss',
'shex': 'shexc',
'shen': 'shen',
'siv': 'sieve',
'sieve': 'sieve',
'sil': 'silver',
'vpr': 'silver',
'sl': 'slurm',
'slim': 'slim',
'smali': 'smali',
'st': 'smalltalk',
'sgf': 'sgf',
'tpl': 'smarty',
'snobol': 'snobol',
'sbl': 'snowball',
'sol': 'solidity',
'sp': 'sp',
'rq': 'sparql',
'sparql': 'sparql',
'sql': 'tsql',
'sqlite3-console': 'sqlite3',
'ssp': 'ssp',
'stan': 'stan',
'do': 'stata',
'ado': 'stata',
'swift': 'swift',
'swg': 'swig',
'i': 'swig',
'sv': 'systemverilog',
'svh': 'systemverilog',
'tap': 'tap',
'toml': 'toml',
'tasm': 'tasm',
'tcl': 'tcl',
'rvt': 'tcl',
'tcsh': 'tcsh',
'csh': 'tcsh',
'tea': 'tea',
'ttl': 'turtle',
'tf': 'terraform',
'tex': 'tex',
'aux': 'tex',
'toc': 'tex',
'txt': 'text',
'thrift': 'thrift',
'todotxt': 'todotxt',
'treetop': 'treetop',
'tt': 'treetop',
'twig': 'html+twig',
'ts': 'ts',
'tsx': 'ts',
'typoscript': 'typoscript',
'u': 'urbiscript',
'u1': 'ucode',
'u2': 'ucode',
'icn': 'unicon',
'usd': 'usd',
'usda': 'usd',
'vbs': 'vbscript',
'VBS': 'vbscript',
'vcl': 'vcl',
'rpf': 'vgl',
'vala': 'vala',
'vapi': 'vala',
'vb': 'vb.net',
'vm': 'velocity',
'fhtml': 'velocity',
'vhdl': 'vhdl',
'vhd': 'vhdl',
'vim': 'vim',
'wdiff': 'wdiff',
'webidl': 'webidl',
'whiley': 'whiley',
'x10': 'x10',
'xqy': 'xquery',
'xquery': 'xquery',
'xq': 'xquery',
'xql': 'xquery',
'xqm': 'xquery',
'xsl': 'xslt',
'rss': 'xml',
'xsd': 'xml',
'wsdl': 'xml',
'wsf': 'xml',
'xpl': 'xslt',
'xtend': 'xtend',
'xtm': 'extempore',
'sls': 'yaml+jinja',
'yaml': 'yaml',
'yml': 'yaml',
'zeek': 'zeek',
'bro': 'zeek',
'zep': 'zephir',
'zig': 'zig',
}
DIC_LEXER_CONST = {
'.htaccess': 'apacheconf',
'apache.conf': 'apacheconf',
'apache2.conf': 'apacheconf',
'.bashrc': 'bash',
'bashrc': 'bash',
'.bash_*': 'bash',
'bash_*': 'bash',
'zshrc': 'bash',
'.zshrc': 'bash',
'PKGBUILD': 'bash',
'CMakeLists.txt': 'cmake',
'control': 'control',
'Dockerfile': 'docker',
'Pipfile.lock': 'json',
'Kconfig': 'kconfig',
'*Config.in*': 'kconfig',
'external.in*': 'kconfig',
'standard-modules.in': 'kconfig',
'Makefile': 'make',
'makefile': 'make',
'Makefile.*': 'make',
'GNUmakefile': 'make',
'autohandler': 'mason',
'dhandler': 'mason',
'autodelegate': 'myghty',
'nginx.conf': 'nginx',
'pacman.conf': 'pacmanconf',
'SConstruct': 'python',
'SConscript': 'python',
'BUCK': 'python',
'BUILD': 'python',
'BUILD.bazel': 'python',
'WORKSPACE': 'python',
'Rakefile': 'rb',
'Gemfile': 'rb',
'.Rhistory': 'splus',
'.Rprofile': 'splus',
'.Renviron': 'splus',
'sources.list': 'sourceslist',
'squid.conf': 'squidconf',
'Pipfile': 'toml',
'poetry.lock': 'toml',
'termcap': 'termcap',
'termcap.src': 'termcap',
'terminfo': 'terminfo',
'terminfo.src': 'terminfo',
'todo.txt': 'todotxt',
'.vimrc': 'vim',
'.exrc': 'vim',
'.gvimrc': 'vim',
'_vimrc': 'vim',
'_exrc': 'vim',
'_gvimrc': 'vim',
'vimrc': 'vim',
'gvimrc': 'vim',
'xorg.conf': 'xorg.conf',
}
if __name__ == '__main__':
def generate_lexer_dict():
"""
        The function generates the lexer dicts
Returns
-------
dic_lexer_wc : dict
Lexers with wildcard.
- structure: {extension: alias}
dic_lexer_const : dict
Lexers which is constant filename.
- structure: {filename: alias}
See Also
--------
- pygments.lexers.get_all_lexers
- https://github.com/pygments/pygments/blob/master/pygments/lexers/__init__.py
- pygments.lexers._mapping.LEXERS
- https://github.com/pygments/pygments/blob/master/pygments/lexers/_mapping.py
"""
# Generate lexer list[name, aliases, filenames, mimetypes]
lst_lexer = [[lexer[0], lexer[1], filenames, lexer[3]] for lexer in get_all_lexers() for filenames in lexer[2]]
# Split lexer list into extensions with wildcard and const filename
lst_lexer_wc = [lexer for lexer in lst_lexer if '*.' in lexer[2]]
lst_lexer_const = [lexer for lexer in lst_lexer if '*.' not in lexer[2]]
# Generate lexer dict{ext: alias}
dic_lexer_wc = dict(zip(
[lexer[2].replace('*.', '') for lexer in lst_lexer_wc],
[lexer[1][0] for lexer in lst_lexer_wc]
))
dic_lexer_const = dict(zip(
[lexer[2] for lexer in lst_lexer_const],
[lexer[1][0] for lexer in lst_lexer_const]
))
return dic_lexer_wc, dic_lexer_const
def extract_src():
"""
        The function extracts the source code from this file
Returns
-------
        header : str
            Front part of the source code
        footer : str
            Rear part of the source code
See Also
--------
- pygments.lexers._mapping.LEXERS
- https://github.com/pygments/pygments/blob/master/pygments/lexers/_mapping.py
"""
with open(__file__) as fp:
content = fp.read()
        # replace CRLF with LF for Windows.
content = content.replace("\r\n", "\n")
header = content[:content.find('DIC_LEXER_WC = {')]
footer = content[content.find("if __name__ == '__main__':"):]
return header, footer
def main():
# Generate lexer dicts
dic_lexer_wc, dic_lexer_const = generate_lexer_dict()
# Extract sourcecode from this file
header, footer = extract_src()
# Format dicts for writing
format_lexer_wc = ['%r: %r' % (k, v) for k, v in dic_lexer_wc.items()]
format_lexer_const = ['%r: %r' % (k, v) for k, v in dic_lexer_const.items()]
# Write dict to this file
with open(__file__, 'w') as fp:
fp.write(header)
fp.write('DIC_LEXER_WC = {\n %s,\n}\n\n' % ',\n '.join(format_lexer_wc))
fp.write('DIC_LEXER_CONST = {\n %s,\n}\n\n' % ',\n '.join(format_lexer_const))
fp.write(footer)
main()
print('Finish!')
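
# A hedged sketch of how a consumer could resolve a file name to an alias
# with the two tables above (the function name and the 'text' fallback are
# illustrative; richcat's real resolver lives elsewhere). fnmatch covers
# wildcard keys such as 'Makefile.*'; note that a few extension keys are
# themselves patterns (e.g. 'php[345]'), which this simple lookup ignores.
import fnmatch

def lookup_alias(path):
    """Map a file path to a Pygments alias via the generated tables."""
    name = os.path.basename(path)
    for pattern, alias in DIC_LEXER_CONST.items():
        if fnmatch.fnmatch(name, pattern):
            return alias
    ext = name.rsplit('.', 1)[-1] if '.' in name else ''
    return DIC_LEXER_WC.get(ext, 'text')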
| 23.096685
| 119
| 0.462265
|
8e67599abdf0264d72fe0d6ac4513fcd7f28ac82
| 5,183
|
py
|
Python
|
src/garage/tf/algos/trpo.py
|
bainro/garage
|
c5afbb19524792d9bbad9b9741f45e1d48ddca3d
|
[
"MIT"
] | null | null | null |
src/garage/tf/algos/trpo.py
|
bainro/garage
|
c5afbb19524792d9bbad9b9741f45e1d48ddca3d
|
[
"MIT"
] | null | null | null |
src/garage/tf/algos/trpo.py
|
bainro/garage
|
c5afbb19524792d9bbad9b9741f45e1d48ddca3d
|
[
"MIT"
] | null | null | null |
"""Trust Region Policy Optimization."""
from garage.tf.algos.npo import NPO
from garage.tf.optimizers import ConjugateGradientOptimizer
from garage.tf.optimizers import PenaltyLbfgsOptimizer
class TRPO(NPO):
"""Trust Region Policy Optimization.
See https://arxiv.org/abs/1502.05477.
Args:
env_spec (garage.envs.EnvSpec): Environment specification.
policy (garage.tf.policies.base.Policy): Policy.
baseline (garage.tf.baselines.Baseline): The baseline.
scope (str): Scope for identifying the algorithm.
Must be specified if running multiple algorithms
simultaneously, each using different environments
and policies.
max_path_length (int): Maximum length of a single rollout.
discount (float): Discount.
gae_lambda (float): Lambda used for generalized advantage
estimation.
center_adv (bool): Whether to rescale the advantages
so that they have mean 0 and standard deviation 1.
positive_adv (bool): Whether to shift the advantages
so that they are always positive. When used in
conjunction with center_adv the advantages will be
standardized before shifting.
fixed_horizon (bool): Whether to fix horizon.
lr_clip_range (float): The limit on the likelihood ratio between
policies, as in PPO.
max_kl_step (float): The maximum KL divergence between old and new
policies, as in TRPO.
optimizer (object): The optimizer of the algorithm. Should be the
optimizers in garage.tf.optimizers.
optimizer_args (dict): The arguments of the optimizer.
policy_ent_coeff (float): The coefficient of the policy entropy.
Setting it to zero would mean no entropy regularization.
use_softplus_entropy (bool): Whether to estimate the softmax
distribution of the entropy to prevent the entropy from being
negative.
use_neg_logli_entropy (bool): Whether to estimate the entropy as the
negative log likelihood of the action.
stop_entropy_gradient (bool): Whether to stop the entropy gradient.
kl_constraint (str): KL constraint, either 'hard' or 'soft'.
entropy_method (str): A string from: 'max', 'regularized',
'no_entropy'. The type of entropy method to use. 'max' adds the
dense entropy to the reward for each time step. 'regularized' adds
the mean entropy to the surrogate objective. See
https://arxiv.org/abs/1805.00909 for more details.
flatten_input (bool): Whether to flatten input along the observation
dimension. If True, for example, an observation with shape (2, 4)
will be flattened to 8.
name (str): The name of the algorithm.
"""
def __init__(self,
env_spec,
policy,
baseline,
scope=None,
max_path_length=500,
discount=0.99,
gae_lambda=0.98,
center_adv=True,
positive_adv=False,
fixed_horizon=False,
lr_clip_range=0.01,
max_kl_step=0.01,
optimizer=None,
optimizer_args=None,
policy_ent_coeff=0.0,
use_softplus_entropy=False,
use_neg_logli_entropy=False,
stop_entropy_gradient=False,
kl_constraint='hard',
entropy_method='no_entropy',
flatten_input=True,
name='TRPO'):
if not optimizer:
if kl_constraint == 'hard':
optimizer = ConjugateGradientOptimizer
elif kl_constraint == 'soft':
optimizer = PenaltyLbfgsOptimizer
else:
raise ValueError('Invalid kl_constraint')
if optimizer_args is None:
optimizer_args = dict()
super().__init__(env_spec=env_spec,
policy=policy,
baseline=baseline,
scope=scope,
max_path_length=max_path_length,
discount=discount,
gae_lambda=gae_lambda,
center_adv=center_adv,
positive_adv=positive_adv,
fixed_horizon=fixed_horizon,
pg_loss='surrogate',
lr_clip_range=lr_clip_range,
max_kl_step=max_kl_step,
optimizer=optimizer,
optimizer_args=optimizer_args,
policy_ent_coeff=policy_ent_coeff,
use_softplus_entropy=use_softplus_entropy,
use_neg_logli_entropy=use_neg_logli_entropy,
stop_entropy_gradient=stop_entropy_gradient,
entropy_method=entropy_method,
flatten_input=flatten_input,
name=name)
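
# A minimal, hedged usage sketch (the policy/baseline classes and the
# numbers are illustrative; see the garage examples for a supported setup):
#
# from garage.tf.baselines import GaussianMLPBaseline
# from garage.tf.policies import GaussianMLPPolicy
#
# policy = GaussianMLPPolicy(env_spec=env.spec, hidden_sizes=(32, 32))
# baseline = GaussianMLPBaseline(env_spec=env.spec)
# algo = TRPO(env_spec=env.spec, policy=policy, baseline=baseline,
#             max_path_length=100, discount=0.99, max_kl_step=0.01)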
| 45.069565
| 78
| 0.582867
|
a25f9decd7290a83210b126e034fea662276d252
| 23,425
|
py
|
Python
|
vnpy/gateway/ctptest/ctptest_gateway.py
|
JonnyORZ/vnpy
|
c3bb624d95625412a2dd593326abf3833321d2e2
|
[
"MIT"
] | 11
|
2019-11-18T06:07:16.000Z
|
2020-10-12T11:36:21.000Z
|
vnpy/gateway/ctptest/ctptest_gateway.py
|
dovnekai/vnpy
|
222475fdf97f77f60cec4ecee231f1b85f44df21
|
[
"MIT"
] | 2
|
2019-07-17T09:39:34.000Z
|
2019-10-19T16:21:55.000Z
|
vnpy/gateway/ctptest/ctptest_gateway.py
|
dovnekai/vnpy
|
222475fdf97f77f60cec4ecee231f1b85f44df21
|
[
"MIT"
] | 6
|
2019-10-30T14:52:21.000Z
|
2021-01-11T05:41:17.000Z
|
"""
"""
from datetime import datetime
from .vnctpmd import MdApi
from .vnctptd import TdApi
from .ctp_constant import (
THOST_FTDC_OAS_Submitted,
THOST_FTDC_OAS_Accepted,
THOST_FTDC_OAS_Rejected,
THOST_FTDC_OST_NoTradeQueueing,
THOST_FTDC_OST_PartTradedQueueing,
THOST_FTDC_OST_AllTraded,
THOST_FTDC_OST_Canceled,
THOST_FTDC_D_Buy,
THOST_FTDC_D_Sell,
THOST_FTDC_PD_Long,
THOST_FTDC_PD_Short,
THOST_FTDC_OPT_LimitPrice,
THOST_FTDC_OPT_AnyPrice,
THOST_FTDC_OF_Open,
THOST_FTDC_OFEN_Close,
THOST_FTDC_OFEN_CloseYesterday,
THOST_FTDC_OFEN_CloseToday,
THOST_FTDC_PC_Futures,
THOST_FTDC_PC_Options,
THOST_FTDC_PC_Combination,
THOST_FTDC_CP_CallOptions,
THOST_FTDC_CP_PutOptions,
THOST_FTDC_HF_Speculation,
THOST_FTDC_CC_Immediately,
THOST_FTDC_FCC_NotForceClose,
THOST_FTDC_TC_GFD,
THOST_FTDC_VC_AV,
THOST_FTDC_TC_IOC,
THOST_FTDC_VC_CV,
THOST_FTDC_AF_Delete
)
from vnpy.trader.constant import (
Direction,
Offset,
Exchange,
OrderType,
Product,
Status,
OptionType
)
from vnpy.trader.gateway import BaseGateway
from vnpy.trader.object import (
TickData,
OrderData,
TradeData,
PositionData,
AccountData,
ContractData,
OrderRequest,
CancelRequest,
SubscribeRequest,
)
from vnpy.trader.utility import get_folder_path
from vnpy.trader.event import EVENT_TIMER
STATUS_CTP2VT = {
THOST_FTDC_OAS_Submitted: Status.SUBMITTING,
THOST_FTDC_OAS_Accepted: Status.SUBMITTING,
THOST_FTDC_OAS_Rejected: Status.REJECTED,
THOST_FTDC_OST_NoTradeQueueing: Status.NOTTRADED,
THOST_FTDC_OST_PartTradedQueueing: Status.PARTTRADED,
THOST_FTDC_OST_AllTraded: Status.ALLTRADED,
THOST_FTDC_OST_Canceled: Status.CANCELLED
}
DIRECTION_VT2CTP = {
Direction.LONG: THOST_FTDC_D_Buy,
Direction.SHORT: THOST_FTDC_D_Sell
}
DIRECTION_CTP2VT = {v: k for k, v in DIRECTION_VT2CTP.items()}
DIRECTION_CTP2VT[THOST_FTDC_PD_Long] = Direction.LONG
DIRECTION_CTP2VT[THOST_FTDC_PD_Short] = Direction.SHORT
ORDERTYPE_VT2CTP = {
OrderType.LIMIT: THOST_FTDC_OPT_LimitPrice,
OrderType.MARKET: THOST_FTDC_OPT_AnyPrice
}
ORDERTYPE_CTP2VT = {v: k for k, v in ORDERTYPE_VT2CTP.items()}
OFFSET_VT2CTP = {
Offset.OPEN: THOST_FTDC_OF_Open,
Offset.CLOSE: THOST_FTDC_OFEN_Close,
Offset.CLOSETODAY: THOST_FTDC_OFEN_CloseToday,
Offset.CLOSEYESTERDAY: THOST_FTDC_OFEN_CloseYesterday,
}
OFFSET_CTP2VT = {v: k for k, v in OFFSET_VT2CTP.items()}
EXCHANGE_CTP2VT = {
"CFFEX": Exchange.CFFEX,
"SHFE": Exchange.SHFE,
"CZCE": Exchange.CZCE,
"DCE": Exchange.DCE,
"INE": Exchange.INE
}
PRODUCT_CTP2VT = {
THOST_FTDC_PC_Futures: Product.FUTURES,
THOST_FTDC_PC_Options: Product.OPTION,
THOST_FTDC_PC_Combination: Product.SPREAD
}
OPTIONTYPE_CTP2VT = {
THOST_FTDC_CP_CallOptions: OptionType.CALL,
THOST_FTDC_CP_PutOptions: OptionType.PUT
}
symbol_exchange_map = {}
symbol_name_map = {}
symbol_size_map = {}
class CtptestGateway(BaseGateway):
"""
VN Trader Gateway for CTP Test Environment (6.3.13).
"""
    default_setting = {
        "用户名": "",        # username
        "密码": "",          # password
        "经纪商代码": "",    # broker ID
        "交易服务器": "",    # trading server address
        "行情服务器": "",    # market data server address
        "产品名称": "",      # app/product name
        "授权编码": "",      # authentication code
        "产品信息": ""       # product info
    }
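
    # An illustrative, entirely fake filled-in setting for connect():
    #
    # {
    #     "用户名": "000001",
    #     "密码": "secret",
    #     "经纪商代码": "9999",
    #     "交易服务器": "tcp://127.0.0.1:10101",
    #     "行情服务器": "tcp://127.0.0.1:10111",
    #     "产品名称": "client_test",
    #     "授权编码": "0000000000000000",
    #     "产品信息": ""
    # }
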
exchanges = list(EXCHANGE_CTP2VT.values())
def __init__(self, event_engine):
"""Constructor"""
super().__init__(event_engine, "CTPTEST")
self.td_api = CtpTdApi(self)
self.md_api = CtpMdApi(self)
def connect(self, setting: dict):
""""""
userid = setting["用户名"]
password = setting["密码"]
brokerid = setting["经纪商代码"]
td_address = setting["交易服务器"]
md_address = setting["行情服务器"]
appid = setting["产品名称"]
auth_code = setting["授权编码"]
product_info = setting["产品信息"]
if not td_address.startswith("tcp://"):
td_address = "tcp://" + td_address
if not md_address.startswith("tcp://"):
md_address = "tcp://" + md_address
self.td_api.connect(td_address, userid, password, brokerid, auth_code, appid, product_info)
self.md_api.connect(md_address, userid, password, brokerid)
self.init_query()
def subscribe(self, req: SubscribeRequest):
""""""
self.md_api.subscribe(req)
def send_order(self, req: OrderRequest):
""""""
return self.td_api.send_order(req)
def cancel_order(self, req: CancelRequest):
""""""
self.td_api.cancel_order(req)
def query_account(self):
""""""
self.td_api.query_account()
def query_position(self):
""""""
self.td_api.query_position()
def close(self):
""""""
self.td_api.close()
self.md_api.close()
def write_error(self, msg: str, error: dict):
""""""
error_id = error["ErrorID"]
error_msg = error["ErrorMsg"]
msg = f"{msg},代码:{error_id},信息:{error_msg}"
self.write_log(msg)
def process_timer_event(self, event):
""""""
self.count += 1
if self.count < 2:
return
self.count = 0
func = self.query_functions.pop(0)
func()
self.query_functions.append(func)
def init_query(self):
""""""
self.count = 0
self.query_functions = [self.query_account, self.query_position]
self.event_engine.register(EVENT_TIMER, self.process_timer_event)
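
    # Note: process_timer_event above fires on every EVENT_TIMER but acts
    # only on every second event; it pops one query function, runs it, and
    # re-appends it, so query_account and query_position alternate and the
    # CTP flow-control limits are respected.
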
class CtpMdApi(MdApi):
""""""
def __init__(self, gateway):
"""Constructor"""
super(CtpMdApi, self).__init__()
self.gateway = gateway
self.gateway_name = gateway.gateway_name
self.reqid = 0
self.connect_status = False
self.login_status = False
self.subscribed = set()
self.userid = ""
self.password = ""
self.brokerid = ""
def onFrontConnected(self):
"""
Callback when front server is connected.
"""
self.gateway.write_log("行情服务器连接成功")
self.login()
def onFrontDisconnected(self, reason: int):
"""
Callback when front server is disconnected.
"""
self.login_status = False
self.gateway.write_log(f"行情服务器连接断开,原因{reason}")
def onRspUserLogin(self, data: dict, error: dict, reqid: int, last: bool):
"""
Callback when user is logged in.
"""
if not error["ErrorID"]:
self.login_status = True
self.gateway.write_log("行情服务器登录成功")
for symbol in self.subscribed:
self.subscribeMarketData(symbol)
else:
self.gateway.write_error("行情服务器登录失败", error)
def onRspError(self, error: dict, reqid: int, last: bool):
"""
        Callback when an error occurred.
"""
self.gateway.write_error("行情接口报错", error)
def onRspSubMarketData(self, data: dict, error: dict, reqid: int, last: bool):
""""""
if not error or not error["ErrorID"]:
return
self.gateway.write_error("行情订阅失败", error)
def onRtnDepthMarketData(self, data: dict):
"""
Callback of tick data update.
"""
symbol = data["InstrumentID"]
exchange = symbol_exchange_map.get(symbol, "")
if not exchange:
return
timestamp = f"{data['ActionDay']} {data['UpdateTime']}.{int(data['UpdateMillisec']/100)}"
tick = TickData(
symbol=symbol,
exchange=exchange,
datetime=datetime.strptime(timestamp, "%Y%m%d %H:%M:%S.%f"),
name=symbol_name_map[symbol],
volume=data["Volume"],
last_price=data["LastPrice"],
limit_up=data["UpperLimitPrice"],
limit_down=data["LowerLimitPrice"],
open_price=data["OpenPrice"],
high_price=data["HighestPrice"],
low_price=data["LowestPrice"],
pre_close=data["PreClosePrice"],
bid_price_1=data["BidPrice1"],
ask_price_1=data["AskPrice1"],
bid_volume_1=data["BidVolume1"],
ask_volume_1=data["AskVolume1"],
gateway_name=self.gateway_name
)
self.gateway.on_tick(tick)
def connect(self, address: str, userid: str, password: str, brokerid: int):
"""
Start connection to server.
"""
self.userid = userid
self.password = password
self.brokerid = brokerid
# If not connected, then start connection first.
if not self.connect_status:
path = get_folder_path(self.gateway_name.lower())
self.createFtdcMdApi(str(path) + "\\Md")
self.registerFront(address)
self.init()
self.connect_status = True
# If already connected, then login immediately.
elif not self.login_status:
self.login()
def login(self):
"""
        Log in to server.
"""
req = {
"UserID": self.userid,
"Password": self.password,
"BrokerID": self.brokerid
}
self.reqid += 1
self.reqUserLogin(req, self.reqid)
def subscribe(self, req: SubscribeRequest):
"""
Subscribe to tick data update.
"""
if self.login_status:
self.subscribeMarketData(req.symbol)
self.subscribed.add(req.symbol)
def close(self):
"""
Close the connection.
"""
if self.connect_status:
self.exit()
class CtpTdApi(TdApi):
""""""
def __init__(self, gateway):
"""Constructor"""
super(CtpTdApi, self).__init__()
self.gateway = gateway
self.gateway_name = gateway.gateway_name
self.reqid = 0
self.order_ref = 0
self.connect_status = False
self.login_status = False
self.auth_staus = False
self.login_failed = False
self.userid = ""
self.password = ""
self.brokerid = ""
self.auth_code = ""
self.appid = ""
self.product_info = ""
self.frontid = 0
self.sessionid = 0
self.order_data = []
self.trade_data = []
self.positions = {}
self.sysid_orderid_map = {}
def onFrontConnected(self):
""""""
self.gateway.write_log("交易服务器连接成功")
if self.auth_code:
self.authenticate()
else:
self.login()
def onFrontDisconnected(self, reason: int):
""""""
self.login_status = False
self.gateway.write_log(f"交易服务器连接断开,原因{reason}")
def onRspAuthenticate(self, data: dict, error: dict, reqid: int, last: bool):
""""""
if not error['ErrorID']:
self.auth_staus = True
self.gateway.write_log("交易服务器授权验证成功")
self.login()
else:
self.gateway.write_error("交易服务器授权验证失败", error)
def onRspUserLogin(self, data: dict, error: dict, reqid: int, last: bool):
""""""
if not error["ErrorID"]:
self.frontid = data["FrontID"]
self.sessionid = data["SessionID"]
self.login_status = True
self.gateway.write_log("交易服务器登录成功")
# Confirm settlement
req = {
"BrokerID": self.brokerid,
"InvestorID": self.userid
}
self.reqid += 1
self.reqSettlementInfoConfirm(req, self.reqid)
else:
self.login_failed = True
self.gateway.write_error("交易服务器登录失败", error)
def onRspOrderInsert(self, data: dict, error: dict, reqid: int, last: bool):
""""""
order_ref = data["OrderRef"]
orderid = f"{self.frontid}_{self.sessionid}_{order_ref}"
symbol = data["InstrumentID"]
exchange = symbol_exchange_map[symbol]
order = OrderData(
symbol=symbol,
exchange=exchange,
orderid=orderid,
direction=DIRECTION_CTP2VT[data["Direction"]],
offset=OFFSET_CTP2VT.get(data["CombOffsetFlag"], Offset.NONE),
price=data["LimitPrice"],
volume=data["VolumeTotalOriginal"],
status=Status.REJECTED,
gateway_name=self.gateway_name
)
self.gateway.on_order(order)
self.gateway.write_error("交易委托失败", error)
def onRspOrderAction(self, data: dict, error: dict, reqid: int, last: bool):
""""""
self.gateway.write_error("交易撤单失败", error)
def onRspQueryMaxOrderVolume(self, data: dict, error: dict, reqid: int, last: bool):
""""""
pass
def onRspSettlementInfoConfirm(self, data: dict, error: dict, reqid: int, last: bool):
"""
        Callback of settlement info confirmation.
"""
self.gateway.write_log("结算信息确认成功")
self.reqid += 1
self.reqQryInstrument({}, self.reqid)
def onRspQryInvestorPosition(self, data: dict, error: dict, reqid: int, last: bool):
""""""
if not data:
return
# Get buffered position object
key = f"{data['InstrumentID'], data['PosiDirection']}"
position = self.positions.get(key, None)
if not position:
position = PositionData(
symbol=data["InstrumentID"],
exchange=symbol_exchange_map[data["InstrumentID"]],
direction=DIRECTION_CTP2VT[data["PosiDirection"]],
gateway_name=self.gateway_name
)
self.positions[key] = position
# For SHFE position data update
if position.exchange == Exchange.SHFE:
if data["YdPosition"] and not data["TodayPosition"]:
position.yd_volume = data["Position"]
# For other exchange position data update
else:
position.yd_volume = data["Position"] - data["TodayPosition"]
# Get contract size (spread contract has no size value)
size = symbol_size_map.get(position.symbol, 0)
# Calculate previous position cost
cost = position.price * position.volume * size
# Update new position volume
position.volume += data["Position"]
position.pnl += data["PositionProfit"]
# Calculate average position price
if position.volume and size:
cost += data["PositionCost"]
position.price = cost / (position.volume * size)
        # Get frozen volume (closing a long position places a sell order,
        # so a long position's frozen volume comes from ShortFrozen, and
        # vice versa for shorts)
        if position.direction == Direction.LONG:
            position.frozen += data["ShortFrozen"]
        else:
            position.frozen += data["LongFrozen"]
if last:
for position in self.positions.values():
self.gateway.on_position(position)
self.positions.clear()
def onRspQryTradingAccount(self, data: dict, error: dict, reqid: int, last: bool):
""""""
if "AccountID" not in data:
return
account = AccountData(
accountid=data["AccountID"],
balance=data["Balance"],
frozen=data["FrozenMargin"] + data["FrozenCash"] + data["FrozenCommission"],
gateway_name=self.gateway_name
)
account.available = data["Available"]
self.gateway.on_account(account)
def onRspQryInstrument(self, data: dict, error: dict, reqid: int, last: bool):
"""
Callback of instrument query.
"""
product = PRODUCT_CTP2VT.get(data["ProductClass"], None)
if product:
contract = ContractData(
symbol=data["InstrumentID"],
exchange=EXCHANGE_CTP2VT[data["ExchangeID"]],
name=data["InstrumentName"],
product=product,
size=data["VolumeMultiple"],
pricetick=data["PriceTick"],
gateway_name=self.gateway_name
)
# For option only
if contract.product == Product.OPTION:
                contract.option_underlying = data["UnderlyingInstrID"]
                contract.option_type = OPTIONTYPE_CTP2VT.get(data["OptionsType"], None)
                contract.option_strike = data["StrikePrice"]
                contract.option_expiry = datetime.strptime(data["ExpireDate"], "%Y%m%d")
self.gateway.on_contract(contract)
symbol_exchange_map[contract.symbol] = contract.exchange
symbol_name_map[contract.symbol] = contract.name
symbol_size_map[contract.symbol] = contract.size
if last:
self.gateway.write_log("合约信息查询成功")
for data in self.order_data:
self.onRtnOrder(data)
self.order_data.clear()
for data in self.trade_data:
self.onRtnTrade(data)
self.trade_data.clear()
def onRtnOrder(self, data: dict):
"""
Callback of order status update.
"""
symbol = data["InstrumentID"]
exchange = symbol_exchange_map.get(symbol, "")
if not exchange:
self.order_data.append(data)
return
frontid = data["FrontID"]
sessionid = data["SessionID"]
order_ref = data["OrderRef"]
orderid = f"{frontid}_{sessionid}_{order_ref}"
order = OrderData(
symbol=symbol,
exchange=exchange,
orderid=orderid,
type=ORDERTYPE_CTP2VT[data["OrderPriceType"]],
direction=DIRECTION_CTP2VT[data["Direction"]],
offset=OFFSET_CTP2VT[data["CombOffsetFlag"]],
price=data["LimitPrice"],
volume=data["VolumeTotalOriginal"],
traded=data["VolumeTraded"],
status=STATUS_CTP2VT[data["OrderStatus"]],
time=data["InsertTime"],
gateway_name=self.gateway_name
)
self.gateway.on_order(order)
self.sysid_orderid_map[data["OrderSysID"]] = orderid
def onRtnTrade(self, data: dict):
"""
Callback of trade status update.
"""
symbol = data["InstrumentID"]
exchange = symbol_exchange_map.get(symbol, "")
if not exchange:
self.trade_data.append(data)
return
orderid = self.sysid_orderid_map[data["OrderSysID"]]
trade = TradeData(
symbol=symbol,
exchange=exchange,
orderid=orderid,
tradeid=data["TradeID"],
direction=DIRECTION_CTP2VT[data["Direction"]],
offset=OFFSET_CTP2VT[data["OffsetFlag"]],
price=data["Price"],
volume=data["Volume"],
time=data["TradeTime"],
gateway_name=self.gateway_name
)
self.gateway.on_trade(trade)
def connect(
self,
address: str,
userid: str,
password: str,
brokerid: int,
auth_code: str,
appid: str,
        product_info: str
):
"""
Start connection to server.
"""
self.userid = userid
self.password = password
self.brokerid = brokerid
self.auth_code = auth_code
self.appid = appid
self.product_info = product_info
if not self.connect_status:
path = get_folder_path(self.gateway_name.lower())
self.createFtdcTraderApi(str(path) + "\\Td")
self.subscribePrivateTopic(0)
self.subscribePublicTopic(0)
self.registerFront(address)
self.init()
self.connect_status = True
else:
self.authenticate()
def authenticate(self):
"""
Authenticate with auth_code and appid.
"""
req = {
"UserID": self.userid,
"BrokerID": self.brokerid,
"AuthCode": self.auth_code,
"AppID": self.appid
}
if self.product_info:
req["UserProductInfo"] = self.product_info
self.reqid += 1
self.reqAuthenticate(req, self.reqid)
def login(self):
"""
Login onto server.
"""
if self.login_failed:
return
req = {
"UserID": self.userid,
"Password": self.password,
"BrokerID": self.brokerid,
"AppID": self.appid
}
if self.product_info:
req["UserProductInfo"] = self.product_info
self.reqid += 1
self.reqUserLogin(req, self.reqid)
def send_order(self, req: OrderRequest):
"""
Send new order.
"""
self.order_ref += 1
ctp_req = {
"InstrumentID": req.symbol,
"LimitPrice": req.price,
"VolumeTotalOriginal": int(req.volume),
"OrderPriceType": ORDERTYPE_VT2CTP.get(req.type, ""),
"Direction": DIRECTION_VT2CTP.get(req.direction, ""),
"CombOffsetFlag": OFFSET_VT2CTP.get(req.offset, ""),
"OrderRef": str(self.order_ref),
"InvestorID": self.userid,
"UserID": self.userid,
"BrokerID": self.brokerid,
"CombHedgeFlag": THOST_FTDC_HF_Speculation,
"ContingentCondition": THOST_FTDC_CC_Immediately,
"ForceCloseReason": THOST_FTDC_FCC_NotForceClose,
"IsAutoSuspend": 0,
"TimeCondition": THOST_FTDC_TC_GFD,
"VolumeCondition": THOST_FTDC_VC_AV,
"MinVolume": 1
}
if req.type == OrderType.FAK:
ctp_req["OrderPriceType"] = THOST_FTDC_OPT_LimitPrice
ctp_req["TimeCondition"] = THOST_FTDC_TC_IOC
ctp_req["VolumeCondition"] = THOST_FTDC_VC_AV
elif req.type == OrderType.FOK:
ctp_req["OrderPriceType"] = THOST_FTDC_OPT_LimitPrice
ctp_req["TimeCondition"] = THOST_FTDC_TC_IOC
ctp_req["VolumeCondition"] = THOST_FTDC_VC_CV
self.reqid += 1
self.reqOrderInsert(ctp_req, self.reqid)
orderid = f"{self.frontid}_{self.sessionid}_{self.order_ref}"
order = req.create_order_data(orderid, self.gateway_name)
self.gateway.on_order(order)
return order.vt_orderid
def cancel_order(self, req: CancelRequest):
"""
Cancel existing order.
"""
frontid, sessionid, order_ref = req.orderid.split("_")
ctp_req = {
"InstrumentID": req.symbol,
"Exchange": req.exchange,
"OrderRef": order_ref,
"FrontID": int(frontid),
"SessionID": int(sessionid),
"ActionFlag": THOST_FTDC_AF_Delete,
"BrokerID": self.brokerid,
"InvestorID": self.userid
}
self.reqid += 1
self.reqOrderAction(ctp_req, self.reqid)
def query_account(self):
"""
Query account balance data.
"""
self.reqid += 1
self.reqQryTradingAccount({}, self.reqid)
def query_position(self):
"""
Query position holding data.
"""
if not symbol_exchange_map:
return
req = {
"BrokerID": self.brokerid,
"InvestorID": self.userid
}
self.reqid += 1
self.reqQryInvestorPosition(req, self.reqid)
def close(self):
""""""
if self.connect_status:
self.exit()
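# A minimal usage sketch (not part of the gateway): the sequence a gateway
# would drive against the trader API class above. The front address and
# credentials are placeholders, and "api" stands for an already-created
# instance with its gateway attached.
def _example_td_api_flow(api):
    # connect() registers the front and triggers onFrontConnected, which
    # authenticates (when an auth_code is set) and then logs in; login in
    # turn confirms settlement info and queries all contracts.
    api.connect(
        address="tcp://127.0.0.1:10101",  # placeholder CTP trade front
        userid="000001",
        password="secret",
        brokerid=9999,
        auth_code="",
        appid="",
        product_info="",
    )
    # Once logged in, the gateway can poll account and position data.
    api.query_account()
    api.query_position()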
# ----------------------------------------------------------------------
# [dataset record boundary]
# previous file stats: avg_line_length=29.171856 | max_line_length=99 | alphanum_fraction=0.584418
# next file: tests/utils/test_date.py (py / Python, 844 bytes)
#   hexsha: 197429d502191499946724c2e4c3f0d7128b4d7e
#   repo: szilvajuhos/scout @ 2f4a03fb3192a57c99fd62be626e8c22051e81af
#   licenses: [BSD-3-Clause] | stars: 1 (2019-08-17) | issues: null | forks: null
# ----------------------------------------------------------------------
import datetime
import pytest
from scout.utils.date import (match_date, get_date)
def test_match_date_dash():
date_str = "2015-05-10"
assert match_date(date_str)
def test_match_date_dot():
date_str = "2015.05.10"
assert match_date(date_str)
def test_match_invalid_date():
date_str = "20150510"
assert not match_date(date_str)
def test_match_invalid_date_string():
date_str = "hello"
assert not match_date(date_str)
def test_valid_date():
date_str = "2015-05-10"
date_obj = get_date(date_str)
assert isinstance(date_obj, datetime.datetime)
def test_valid_date_no_date():
date_str = None
date_obj = get_date(date_str)
assert isinstance(date_obj, datetime.datetime)
def test_invalid_date():
date_str = "20150510"
with pytest.raises(ValueError):
        get_date(date_str)
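# A minimal sketch of the two helpers these tests exercise, inferred from
# the assertions above. The real implementations live in scout.utils.date
# and may differ; treat this as an assumption, not the library code.
import re
def _match_date_sketch(date_str):
    # Accept YYYY-MM-DD or YYYY.MM.DD; reject bare digits and free text.
    return bool(date_str) and bool(re.match(r"^\d{4}[-.]\d{2}[-.]\d{2}$", date_str))
def _get_date_sketch(date_str):
    # No string given: fall back to "now", mirroring test_valid_date_no_date.
    if not date_str:
        return datetime.datetime.now()
    # Unparseable strings raise ValueError, mirroring test_invalid_date.
    if not _match_date_sketch(date_str):
        raise ValueError("invalid date: %s" % date_str)
    return datetime.datetime.strptime(date_str.replace(".", "-"), "%Y-%m-%d")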
# ----------------------------------------------------------------------
# [dataset record boundary]
# previous file stats: avg_line_length=24.114286 | max_line_length=51 | alphanum_fraction=0.727488
# next file: aceql/progress_indicator.py (py / Python, 1885 bytes)
#   hexsha: f3bd92c097acd7ade5514a6a41068518db3bc954
#   repo: GuillaumeR77/aceql-http-client-python @ a7f46cf2f72095d41ce4d946e2e55e6a4007f79e
#   licenses: [Apache-2.0] | stars: null | issues: null | forks: null
# ----------------------------------------------------------------------
#
# This file is part of AceQL Python Client SDK.
# AceQL Python Client SDK: Remote SQL access over HTTP with AceQL HTTP.
# Copyright (C) 2021, KawanSoft SAS
# (http://www.kawansoft.com). All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from aceql._private.parms import Parms
class ProgressIndicator(object):
"""Class that holds a progress when transferring a Blob."""
def __init__(self):
self.__percent = 0
self.__cancelled = False
#
    # Private methods, not to be called by user code.
#
def _increment(self):
""" Called by AceQL internal code during transfer progress."""
if self.__percent < 99:
self.__percent += 1
if Parms.PRINT_PROGRESS_INDICATOR:
print(str(self.__percent) + "%")
def _set_complete(self):
""" To be called by upload/download AceQL internal code when transfer is complete."""
self.__percent = 100
#
# Public methods
#
@property
def get_percent(self) -> int:
""" Allows to get transfer progress from 0 to 100 """
return self.__percent
def cancel(self):
""" Allows caller to cancel the transfer operation """
self.__cancelled = True
def is_cancelled(self) -> bool:
""" Allows for AceQL internal for transfer thread to test if cancelled """
return self.__cancelled
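# A short usage sketch: application code polls the indicator while an
# AceQL Blob transfer runs in another thread. _increment()/_set_complete()
# are normally driven by the SDK; they are called directly here only to
# keep the example self-contained.
def _example_progress_polling():
    indicator = ProgressIndicator()
    for _ in range(5):
        indicator._increment()       # the SDK does this during a transfer
    print(indicator.get_percent)     # -> 5
    indicator.cancel()               # the application aborts the transfer
    print(indicator.is_cancelled())  # -> True; the SDK thread would stop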
# ----------------------------------------------------------------------
# [dataset record boundary]
# previous file stats: avg_line_length=31.416667 | max_line_length=93 | alphanum_fraction=0.67321
# next file: test/test_local_worker.py (py / Python, 1932 bytes)
#   hexsha: 55cebc01d60cf9af3639dedbf4114becac66c5a6
#   repo: Rishav1/PySyft @ f620ee12727b52b19a317f263789830b57ee2539
#   licenses: [Apache-2.0] | stars: 2 (2020-12-30..2021-12-04)
#   issues: 3 (2019-05-24..2019-09-18) | forks: 1 (2022-03-12)
# ----------------------------------------------------------------------
import syft as sy
import torch as th
def test_client_worker_does_not_register_object(hook):
me = hook.local_worker
me.is_client_worker = True
x = th.tensor([1, 2, 3])
assert x.id not in me._objects
def test_object_registration(hook):
me = hook.local_worker
me.is_client_worker = False
x = th.tensor([1, 2, 3])
assert x.id in me._objects
me.is_client_worker = True
def test_pointer_registration(workers, hook):
alice = workers["alice"]
me = hook.local_worker
me.is_client_worker = False
x_ptr = th.tensor([1, 2, 3]).send(alice)
assert x_ptr.id in me._objects
me.is_client_worker = True
def test_fix_prec_tensor_registration(hook):
me = hook.local_worker
me.is_client_worker = False
x_sh = th.tensor([1.0, 2, 3]).fix_prec()
assert x_sh.id in me._objects
me.is_client_worker = True
def test_shared_tensor_registration(workers, hook):
alice, bob, charlie = workers["alice"], workers["bob"], workers["charlie"]
me = hook.local_worker
me.is_client_worker = False
x_sh = th.tensor([1.0, 2, 3]).fix_prec().share(alice, bob, crypto_provider=charlie)
assert x_sh.id in me._objects
me.is_client_worker = True
def test_shared_tensor_registration_pointer(workers, hook):
alice, bob, charlie, james = (
workers["alice"],
workers["bob"],
workers["charlie"],
workers["james"],
)
me = hook.local_worker
me.is_client_worker = False
x_ptr = th.tensor([1, 2, 3]).send(alice)
x_sh = x_ptr.fix_prec().share(bob, charlie, crypto_provider=james)
assert x_sh.id in me._objects
me.is_client_worker = True
def test_in_known_workers(hook):
# Get local worker
local_worker = hook.local_worker
    worker_id = local_worker.id
    # Get known workers dict
    known_workers = local_worker._known_workers
    assert worker_id in known_workers and local_worker == known_workers[worker_id]
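# The "hook" and "workers" arguments above are pytest fixtures supplied by
# the suite's conftest.py. A minimal sketch of fixtures that would satisfy
# these tests follows (an illustrative assumption; PySyft's real conftest
# is richer). Non-clashing names are used so nothing here shadows the
# suite's own fixtures.
import pytest
@pytest.fixture(scope="session")
def example_hook():
    # Hooking torch exposes .local_worker, which the tests toggle between
    # client and non-client mode.
    return sy.TorchHook(th)
@pytest.fixture
def example_workers(example_hook):
    # One VirtualWorker per name the tests expect.
    return {
        name: sy.VirtualWorker(example_hook, id=name)
        for name in ("alice", "bob", "charlie", "james")
    }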
# ----------------------------------------------------------------------
# [dataset record boundary]
# previous file stats: avg_line_length=25.76 | max_line_length=87 | alphanum_fraction=0.686335
# next file: myven/lib/python3.8/site-packages/ansible/modules/network/f5/bigip_monitor_snmp_dca.py
#   (py / Python, 21318 bytes)
#   hexsha: 0c3ff6035b856daaeb72333b42682dfb02fa959a
#   repo: baltham/dne-dna-code @ 4a13309a790a670d2f07e635c9264a0c29976c6a
#   licenses: [MIT] | stars: 1 (2021-04-02) | issues: null | forks: 1 (2020-05-03)
# ----------------------------------------------------------------------
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_monitor_snmp_dca
short_description: Manages BIG-IP SNMP data collecting agent (DCA) monitors
description:
- The BIG-IP has an SNMP data collecting agent (DCA) that can query remote
SNMP agents of various types, including the UC Davis agent (UCD) and the
Windows 2000 Server agent (WIN2000).
version_added: "2.5"
options:
name:
description:
- Monitor name.
required: True
description:
description:
- Specifies descriptive text that identifies the monitor.
parent:
description:
- The parent template of this monitor template. Once this value has
been set, it cannot be changed. By default, this value is the C(snmp_dca)
parent on the C(Common) partition.
default: "/Common/snmp_dca"
interval:
description:
- Specifies, in seconds, the frequency at which the system issues the
monitor check when either the resource is down or the status of the
resource is unknown. When creating a new monitor, the default is C(10).
timeout:
description:
- Specifies the number of seconds the target has in which to respond to
the monitor request. When creating a new monitor, the default is C(30)
seconds. If the target responds within the set time period, it is
considered 'up'. If the target does not respond within the set time
period, it is considered 'down'. When this value is set to 0 (zero),
the system uses the interval from the parent monitor. Note that
C(timeout) and C(time_until_up) combine to control when a resource is
set to up.
time_until_up:
description:
- Specifies the number of seconds to wait after a resource first responds
correctly to the monitor before setting the resource to 'up'. During the
interval, all responses from the resource must be correct. When the
interval expires, the resource is marked 'up'. A value of 0, means
that the resource is marked up immediately upon receipt of the first
correct response. When creating a new monitor, the default is C(0).
community:
description:
- Specifies the community name that the system must use to authenticate
with the host server through SNMP. When creating a new monitor, the
default value is C(public). Note that this value is case sensitive.
version:
description:
- Specifies the version of SNMP that the host server uses. When creating
a new monitor, the default is C(v1). When C(v1), specifies that the
host server uses SNMP version 1. When C(v2c), specifies that the host
server uses SNMP version 2c.
choices:
- v1
- v2c
agent_type:
description:
- Specifies the SNMP agent running on the monitored server. When creating
a new monitor, the default is C(UCD) (UC-Davis).
choices:
- UCD
- WIN2000
- GENERIC
cpu_coefficient:
description:
- Specifies the coefficient that the system uses to calculate the weight
of the CPU threshold in the dynamic ratio load balancing algorithm.
When creating a new monitor, the default is C(1.5).
cpu_threshold:
description:
- Specifies the maximum acceptable CPU usage on the target server. When
creating a new monitor, the default is C(80) percent.
memory_coefficient:
description:
- Specifies the coefficient that the system uses to calculate the weight
of the memory threshold in the dynamic ratio load balancing algorithm.
When creating a new monitor, the default is C(1.0).
memory_threshold:
description:
- Specifies the maximum acceptable memory usage on the target server.
When creating a new monitor, the default is C(70) percent.
disk_coefficient:
description:
- Specifies the coefficient that the system uses to calculate the weight
of the disk threshold in the dynamic ratio load balancing algorithm.
When creating a new monitor, the default is C(2.0).
disk_threshold:
description:
- Specifies the maximum acceptable disk usage on the target server. When
creating a new monitor, the default is C(90) percent.
partition:
description:
- Device partition to manage resources on.
default: Common
state:
description:
- When C(present), ensures that the monitor exists.
- When C(absent), ensures the monitor is removed.
default: present
choices:
- present
- absent
version_added: 2.5
notes:
- Requires BIG-IP software version >= 12
- This module does not support the C(variables) option because this option
is broken in the REST API and does not function correctly in C(tmsh); for
example you cannot remove user-defined params. Therefore, there is no way
to automatically configure it.
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create SNMP DCA monitor
bigip_monitor_snmp_dca:
state: present
server: lb.mydomain.com
user: admin
password: secret
name: my_monitor
delegate_to: localhost
- name: Remove SNMP DCA monitor
bigip_monitor_snmp_dca:
state: absent
server: lb.mydomain.com
user: admin
password: secret
name: my_monitor
delegate_to: localhost
'''
RETURN = r'''
parent:
description: New parent template of the monitor.
returned: changed
type: string
sample: snmp_dca
interval:
description: The new interval in which to run the monitor check.
returned: changed
type: int
sample: 2
timeout:
description: The new timeout in which the remote system must respond to the monitor.
returned: changed
type: int
sample: 10
time_until_up:
description: The new time in which to mark a system as up after first successful response.
returned: changed
type: int
sample: 2
community:
description: The new community for the monitor.
returned: changed
type: string
sample: foobar
version:
  description: The new SNMP version to be used by the monitor.
returned: changed
type: string
sample: v2c
agent_type:
description: The new agent type to be used by the monitor.
returned: changed
type: string
sample: UCD
cpu_coefficient:
description: The new CPU coefficient.
returned: changed
type: float
sample: 2.4
cpu_threshold:
description: The new CPU threshold.
returned: changed
type: int
sample: 85
memory_coefficient:
description: The new memory coefficient.
returned: changed
type: float
sample: 6.4
memory_threshold:
description: The new memory threshold.
returned: changed
type: int
sample: 50
disk_coefficient:
description: The new disk coefficient.
returned: changed
type: float
sample: 10.2
disk_threshold:
description: The new disk threshold.
returned: changed
type: int
sample: 34
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
HAS_DEVEL_IMPORTS = False
try:
# Sideband repository used for dev
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fqdn_name
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
HAS_DEVEL_IMPORTS = True
except ImportError:
# Upstream Ansible
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fqdn_name
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
class Parameters(AnsibleF5Parameters):
api_map = {
'timeUntilUp': 'time_until_up',
'defaultsFrom': 'parent',
'agentType': 'agent_type',
'cpuCoefficient': 'cpu_coefficient',
'cpuThreshold': 'cpu_threshold',
'memoryCoefficient': 'memory_coefficient',
'memoryThreshold': 'memory_threshold',
'diskCoefficient': 'disk_coefficient',
'diskThreshold': 'disk_threshold'
}
api_attributes = [
'timeUntilUp', 'defaultsFrom', 'interval', 'timeout', 'destination', 'community',
'version', 'agentType', 'cpuCoefficient', 'cpuThreshold', 'memoryCoefficient',
'memoryThreshold', 'diskCoefficient', 'diskThreshold'
]
returnables = [
'parent', 'ip', 'interval', 'timeout', 'time_until_up', 'description', 'community',
'version', 'agent_type', 'cpu_coefficient', 'cpu_threshold', 'memory_coefficient',
'memory_threshold', 'disk_coefficient', 'disk_threshold'
]
updatables = [
'ip', 'interval', 'timeout', 'time_until_up', 'description', 'community',
'version', 'agent_type', 'cpu_coefficient', 'cpu_threshold', 'memory_coefficient',
'memory_threshold', 'disk_coefficient', 'disk_threshold'
]
def _fqdn_name(self, value):
if value is not None and not value.startswith('/'):
return '/{0}/{1}'.format(self.partition, value)
return value
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
return result
except Exception:
return result
@property
def interval(self):
if self._values['interval'] is None:
return None
        if not 1 <= int(self._values['interval']) <= 86400:
raise F5ModuleError(
"Interval value must be between 1 and 86400"
)
return int(self._values['interval'])
@property
def timeout(self):
if self._values['timeout'] is None:
return None
return int(self._values['timeout'])
@property
def time_until_up(self):
if self._values['time_until_up'] is None:
return None
return int(self._values['time_until_up'])
@property
def parent(self):
if self._values['parent'] is None:
return None
result = self._fqdn_name(self._values['parent'])
return result
@property
def cpu_coefficient(self):
result = self._get_numeric_property('cpu_coefficient')
return result
@property
def cpu_threshold(self):
result = self._get_numeric_property('cpu_threshold')
return result
@property
def memory_coefficient(self):
result = self._get_numeric_property('memory_coefficient')
return result
@property
def memory_threshold(self):
result = self._get_numeric_property('memory_threshold')
return result
@property
def disk_coefficient(self):
result = self._get_numeric_property('disk_coefficient')
return result
@property
def disk_threshold(self):
result = self._get_numeric_property('disk_threshold')
return result
    def _get_numeric_property(self, prop):
        if self._values[prop] is None:
            return None
        try:
            fvar = float(self._values[prop])
        except ValueError:
            raise F5ModuleError(
                "Provided {0} must be a valid number".format(prop)
            )
        return fvar
@property
def type(self):
return 'snmp_dca'
class Changes(Parameters):
pass
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
result = self.__default(param)
return result
@property
def parent(self):
if self.want.parent != self.have.parent:
raise F5ModuleError(
"The parent monitor cannot be changed"
)
@property
def interval(self):
if self.want.timeout is not None and self.want.interval is not None:
if self.want.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.timeout is not None:
if self.have.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.interval is not None:
if self.want.interval >= self.have.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
if self.want.interval != self.have.interval:
return self.want.interval
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.have = None
self.want = Parameters(params=self.module.params)
self.changes = Changes()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = Changes(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
changed[k] = change
if changed:
self.changes = Changes(params=changed)
return True
return False
def _announce_deprecations(self):
warnings = []
if self.want:
warnings += self.want._values.get('__warnings', [])
if self.have:
warnings += self.have._values.get('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def exec_module(self):
changed = False
result = dict()
state = self.want.state
try:
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
changes = self.changes.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations()
return result
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def create(self):
self._set_changed_options()
if self.want.timeout is None:
self.want.update({'timeout': 30})
if self.want.interval is None:
self.want.update({'interval': 10})
if self.want.time_until_up is None:
self.want.update({'time_until_up': 0})
if self.want.community is None:
self.want.update({'community': 'public'})
if self.want.version is None:
self.want.update({'version': 'v1'})
if self.want.agent_type is None:
self.want.update({'agent_type': 'UCD'})
if self.want.cpu_coefficient is None:
self.want.update({'cpu_coefficient': '1.5'})
if self.want.cpu_threshold is None:
self.want.update({'cpu_threshold': '80'})
if self.want.memory_coefficient is None:
self.want.update({'memory_coefficient': '1.0'})
if self.want.memory_threshold is None:
self.want.update({'memory_threshold': '70'})
if self.want.disk_coefficient is None:
self.want.update({'disk_coefficient': '2.0'})
if self.want.disk_threshold is None:
self.want.update({'disk_threshold': '90'})
if self.module.check_mode:
return True
self.create_on_device()
return True
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def absent(self):
if self.exists():
return self.remove()
return False
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the monitor.")
return True
def read_current_from_device(self):
resource = self.client.api.tm.ltm.monitor.snmp_dcas.snmp_dca.load(
name=self.want.name,
partition=self.want.partition
)
result = resource.attrs
return Parameters(params=result)
def exists(self):
result = self.client.api.tm.ltm.monitor.snmp_dcas.snmp_dca.exists(
name=self.want.name,
partition=self.want.partition
)
return result
def update_on_device(self):
params = self.want.api_params()
result = self.client.api.tm.ltm.monitor.snmp_dcas.snmp_dca.load(
name=self.want.name,
partition=self.want.partition
)
result.modify(**params)
def create_on_device(self):
params = self.want.api_params()
self.client.api.tm.ltm.monitor.snmp_dcas.snmp_dca.create(
name=self.want.name,
partition=self.want.partition,
**params
)
def remove_from_device(self):
result = self.client.api.tm.ltm.monitor.snmp_dcas.snmp_dca.load(
name=self.want.name,
partition=self.want.partition
)
if result:
result.delete()
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
description=dict(),
parent=dict(default='/Common/snmp_dca'),
interval=dict(type='int'),
timeout=dict(type='int'),
time_until_up=dict(type='int'),
community=dict(),
version=dict(),
agent_type=dict(
choices=['UCD', 'WIN2000', 'GENERIC']
),
cpu_coefficient=dict(),
cpu_threshold=dict(type='int'),
memory_coefficient=dict(),
memory_threshold=dict(type='int'),
disk_coefficient=dict(),
disk_threshold=dict(type='int'),
state=dict(
default='present',
choices=['present', 'absent']
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
if not HAS_F5SDK:
module.fail_json(msg="The python f5-sdk module is required")
    client = None
    try:
        client = F5Client(**module.params)
        mm = ModuleManager(module=module, client=client)
        results = mm.exec_module()
        cleanup_tokens(client)
        module.exit_json(**results)
    except F5ModuleError as ex:
        if client:
            cleanup_tokens(client)
        module.fail_json(msg=str(ex))
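# A minimal sketch (not part of the module) of how the Difference class
# above enforces the interval < timeout constraint; the values below are
# hypothetical.
def _example_interval_check():
    want = Parameters(params=dict(interval=40, timeout=30))
    have = Parameters(params=dict(interval=10, timeout=30))
    try:
        Difference(want, have).compare('interval')
    except F5ModuleError as error:
        print(error)  # Parameter 'interval' must be less than 'timeout'.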
if __name__ == '__main__':
main()
# ----------------------------------------------------------------------
# [dataset record boundary]
# previous file stats: avg_line_length=32.398176 | max_line_length=92 | alphanum_fraction=0.63627
# next file: venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_wanopt_profile_tcp.py
#   (py / Python, 16190 bytes)
#   hexsha: 3cdbd5c02977f1ff36035a2c0a26292f63896401
#   repo: usegalaxy-no/usegalaxy @ 75dad095769fe918eb39677f2c887e681a747f3a
#   licenses: [MIT] | stars: 1 (2020-01-22) | issues: 12 (2020-02-21..2020-04-14) | forks: null
# ----------------------------------------------------------------------
#!/usr/bin/python
from __future__ import absolute_import, division, print_function
# Copyright 2019-2021 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_wanopt_profile_tcp
short_description: Enable/disable TCP WAN Optimization and configure TCP WAN Optimization features.
description:
- This module is able to configure a FortiManager device.
- Examples include all parameters and values which need to be adjusted to data sources before usage.
version_added: "2.10"
author:
- Link Zheng (@chillancezen)
- Jie Xue (@JieX19)
- Frank Shen (@fshen01)
- Hongbin Lu (@fgtdev-hblu)
notes:
- Running in workspace locking mode is supported in this FortiManager module, the top
level parameters workspace_locking_adom and workspace_locking_timeout help do the work.
- To create or update an object, use state present directive.
- To delete an object, use state absent directive.
  - Normally, running one module can fail when a non-zero rc is returned. You can also override
    the conditions to fail or succeed with the parameters rc_failed and rc_succeeded.
options:
enable_log:
description: Enable/Disable logging for task
required: false
type: bool
default: false
proposed_method:
description: The overridden method for the underlying Json RPC request
required: false
type: str
choices:
- update
- set
- add
bypass_validation:
        description: Only set to True when the module schema differs from the FortiManager API structure; the module then continues to execute without validating parameters.
required: false
type: bool
default: false
workspace_locking_adom:
description: the adom to lock for FortiManager running in workspace mode, the value can be global and others including root
required: false
type: str
workspace_locking_timeout:
description: the maximum time in seconds to wait for other user to release the workspace lock
required: false
type: int
default: 300
state:
description: the directive to create, update or delete an object
type: str
required: true
choices:
- present
- absent
rc_succeeded:
        description: the rc codes list with which the conditions to succeed will be overridden
type: list
required: false
rc_failed:
        description: the rc codes list with which the conditions to fail will be overridden
type: list
required: false
adom:
description: the parameter (adom) in requested url
type: str
required: true
profile:
description: the parameter (profile) in requested url
type: str
required: true
wanopt_profile_tcp:
description: the top level parameters set
required: false
type: dict
suboptions:
byte-caching:
type: str
description: 'Enable/disable byte-caching for HTTP. Byte caching reduces the amount of traffic by caching file data sent across the WAN and ...'
choices:
- 'disable'
- 'enable'
byte-caching-opt:
type: str
description: 'Select whether TCP byte-caching uses system memory only or both memory and disk space.'
choices:
- 'mem-only'
- 'mem-disk'
log-traffic:
type: str
description: 'Enable/disable logging.'
choices:
- 'disable'
- 'enable'
port:
type: str
description: 'Single port number or port number range for TCP. Only packets with a destination port number that matches this port number or ...'
secure-tunnel:
type: str
description: 'Enable/disable securing the WAN Opt tunnel using SSL. Secure and non-secure tunnels use the same TCP port (7810).'
choices:
- 'disable'
- 'enable'
ssl:
type: str
description: 'Enable/disable SSL/TLS offloading.'
choices:
- 'disable'
- 'enable'
ssl-port:
description: no description
type: int
status:
type: str
description: 'Enable/disable HTTP WAN Optimization.'
choices:
- 'disable'
- 'enable'
tunnel-sharing:
type: str
description: 'Tunnel sharing mode for aggressive/non-aggressive and/or interactive/non-interactive protocols.'
choices:
- 'private'
- 'shared'
- 'express-shared'
'''
EXAMPLES = '''
- hosts: fortimanager-inventory
collections:
- fortinet.fortimanager
connection: httpapi
vars:
ansible_httpapi_use_ssl: True
ansible_httpapi_validate_certs: False
ansible_httpapi_port: 443
tasks:
- name: Enable/disable TCP WAN Optimization and configure TCP WAN Optimization features.
fmgr_wanopt_profile_tcp:
bypass_validation: False
workspace_locking_adom: <value in [global, custom adom including root]>
workspace_locking_timeout: 300
rc_succeeded: [0, -2, -3, ...]
rc_failed: [-2, -3, ...]
adom: <your own value>
profile: <your own value>
wanopt_profile_tcp:
byte-caching: <value in [disable, enable]>
byte-caching-opt: <value in [mem-only, mem-disk]>
log-traffic: <value in [disable, enable]>
port: <value of string>
secure-tunnel: <value in [disable, enable]>
ssl: <value in [disable, enable]>
ssl-port: <value of integer>
status: <value in [disable, enable]>
tunnel-sharing: <value in [private, shared, express-shared]>
'''
RETURN = '''
request_url:
description: The full url requested
returned: always
type: str
sample: /sys/login/user
response_code:
description: The status of api request
returned: always
type: int
sample: 0
response_message:
description: The descriptive message of the api response
type: str
returned: always
sample: OK.
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import NAPIManager
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_galaxy_version
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_parameter_bypass
def main():
jrpc_urls = [
'/pm/config/adom/{adom}/obj/wanopt/profile/{profile}/tcp',
'/pm/config/global/obj/wanopt/profile/{profile}/tcp'
]
perobject_jrpc_urls = [
'/pm/config/adom/{adom}/obj/wanopt/profile/{profile}/tcp/{tcp}',
'/pm/config/global/obj/wanopt/profile/{profile}/tcp/{tcp}'
]
url_params = ['adom', 'profile']
module_primary_key = None
module_arg_spec = {
'enable_log': {
'type': 'bool',
'required': False,
'default': False
},
'forticloud_access_token': {
'type': 'str',
'required': False,
'no_log': True
},
'proposed_method': {
'type': 'str',
'required': False,
'choices': [
'set',
'update',
'add'
]
},
'bypass_validation': {
'type': 'bool',
'required': False,
'default': False
},
'workspace_locking_adom': {
'type': 'str',
'required': False
},
'workspace_locking_timeout': {
'type': 'int',
'required': False,
'default': 300
},
'rc_succeeded': {
'required': False,
'type': 'list'
},
'rc_failed': {
'required': False,
'type': 'list'
},
'adom': {
'required': True,
'type': 'str'
},
'profile': {
'required': True,
'type': 'str'
},
'wanopt_profile_tcp': {
'required': False,
'type': 'dict',
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'options': {
'byte-caching': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'choices': [
'disable',
'enable'
],
'type': 'str'
},
'byte-caching-opt': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'choices': [
'mem-only',
'mem-disk'
],
'type': 'str'
},
'log-traffic': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'choices': [
'disable',
'enable'
],
'type': 'str'
},
'port': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'secure-tunnel': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'choices': [
'disable',
'enable'
],
'type': 'str'
},
'ssl': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'choices': [
'disable',
'enable'
],
'type': 'str'
},
'ssl-port': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'int'
},
'status': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'choices': [
'disable',
'enable'
],
'type': 'str'
},
'tunnel-sharing': {
'required': False,
'revision': {
'6.0.0': True,
'6.2.1': True,
'6.2.3': True,
'6.2.5': True,
'6.4.0': True,
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'choices': [
'private',
'shared',
'express-shared'
],
'type': 'str'
}
}
}
}
params_validation_blob = []
check_galaxy_version(module_arg_spec)
module = AnsibleModule(argument_spec=check_parameter_bypass(module_arg_spec, 'wanopt_profile_tcp'),
supports_check_mode=False)
fmgr = None
if module._socket_path:
connection = Connection(module._socket_path)
connection.set_option('enable_log', module.params['enable_log'] if 'enable_log' in module.params else False)
connection.set_option('forticloud_access_token',
module.params['forticloud_access_token'] if 'forticloud_access_token' in module.params else None)
fmgr = NAPIManager(jrpc_urls, perobject_jrpc_urls, module_primary_key, url_params, module, connection, top_level_schema_name='data')
fmgr.validate_parameters(params_validation_blob)
fmgr.process_partial_curd(argument_specs=module_arg_spec)
else:
module.fail_json(msg='MUST RUN IN HTTPAPI MODE')
module.exit_json(meta=module.params)
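# A small illustrative helper (an assumption for clarity, not part of this
# module): each option's 'revision' map in module_arg_spec records which
# FortiManager releases support it; the real gating happens inside
# NAPIManager. This shows how such a map can be read.
def _example_option_supported(option_spec, fmg_version):
    # e.g. _example_option_supported({'revision': {'6.4.5': True}}, '6.4.5') -> True
    return option_spec.get('revision', {}).get(fmg_version, False)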
if __name__ == '__main__':
main()
# ----------------------------------------------------------------------
# [dataset record boundary]
# previous file stats: avg_line_length=34.520256 | max_line_length=160 | alphanum_fraction=0.455405
# next file: data.py (py / Python, 2038 bytes)
#   hexsha: d2012e25b3471e6125d0168663b2ce10662b859c
#   repo: rodoufu/btc-rawtx-rest @ 0a53e32eb16217112e0ed4f5e43b345157378acb
#   licenses: [MIT] | stars: 3 (2020-03-20..2021-09-27) | issues: null | forks: null
# ----------------------------------------------------------------------
import json
class TransactionInput(object):
"""
Transaction input for the service.
"""
def __init__(self, source_address: str, outputs: dict, fee_kb: int):
self.source_address = source_address
self.outputs = outputs
self.fee_kb = fee_kb
def __str__(self):
return json.dumps(self, default=lambda o: o.__dict__)
def __json__(self):
return self.__dict__
class TransactionOutputItem(object):
"""
Item in the transaction output.
"""
def __init__(self, txid: str, vout: int, script_pub_key: str, amount: int):
self.txid = txid
self.vout = vout
self.script_pub_key = script_pub_key
self.amount = amount
def __str__(self):
return json.dumps(self, default=lambda o: o.__dict__)
def __json__(self):
return self.__dict__
def __lt__(self, other):
return self.amount < other.amount
class TransactionOutput(object):
"""
Transaction output for the service.
"""
def __init__(self, raw: str, inputs: list):
self.raw = raw
self.inputs = inputs
def __str__(self):
return json.dumps(self, default=lambda o: o.__dict__)
def __json__(self):
return self.__dict__
class SelectedInfo(object):
"""
Information regarding the selected UTXO for the transaction
"""
def __init__(self, fee_value: int, raw: str, selected: list, outputs: dict):
self.fee_value = fee_value
self.raw = raw
self.selected = selected
self.selected_sum = sum([x['value'] for x in self.selected])
self.outputs = outputs
def __lt__(self, other):
"""
In the case it finds a smaller fee or less UTXO are used or less no change is necessary
:param other: The other object to compare.
:return: True case self is the smaller.
"""
if self.fee_value < other.fee_value:
return True
if self.fee_value == other.fee_value:
if len(self.selected) < len(other.selected):
return True
if len(self.outputs) < len(other.outputs):
return True
if self.selected_sum < other.selected_sum:
return True
return False
def __str__(self):
return json.dumps(self, default=lambda o: o.__dict__)
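# A short usage sketch (all values hypothetical) of the JSON-friendly
# models above and the SelectedInfo preference ordering.
def _example_data_models():
    tx_in = TransactionInput(
        source_address="bc1qexample",         # placeholder address
        outputs={"bc1qdestination": 15000},   # satoshis per destination
        fee_kb=1024,                          # fee rate in satoshis per kB
    )
    print(tx_in)  # serialized through __str__ via json.dumps
    cheaper = SelectedInfo(fee_value=200, raw="00", selected=[{"value": 20000}], outputs={})
    costlier = SelectedInfo(fee_value=300, raw="00", selected=[{"value": 20000}], outputs={})
    assert cheaper < costlier  # the smaller fee wins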
# ----------------------------------------------------------------------
# [dataset record boundary]
# previous file stats: avg_line_length=22.898876 | max_line_length=89 | alphanum_fraction=0.713445
# next file: virtual/lib/python3.8/site-packages/sqlalchemy/dialects/firebird/base.py
#   (py / Python, 31171 bytes)
#   hexsha: e2698b17817d610a8e8e8b1357ce58d5346277a2
#   repo: Esther-Anyona/mylearner @ d49d1c4c8dbeb93cc384f2037c48236be5dc89e1
#   licenses: [MIT] | stars: 4 (2022-02-06..2022-02-25) | issues: 1 (2022-03-12) | forks: 1 (2022-02-08)
# ----------------------------------------------------------------------
# firebird/base.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
r"""
.. dialect:: firebird
:name: Firebird
.. note::
The Firebird dialect within SQLAlchemy **is not currently supported**.
It is not tested within continuous integration and is likely to have
many issues and caveats not currently handled. Consider using the
`external dialect <https://github.com/pauldex/sqlalchemy-firebird>`_
instead.
.. deprecated:: 1.4 The internal Firebird dialect is deprecated and will be
removed in a future version. Use the external dialect.
Firebird Dialects
-----------------
Firebird offers two distinct dialects_ (not to be confused with a
SQLAlchemy ``Dialect``):
dialect 1
This is the old syntax and behaviour, inherited from Interbase pre-6.0.
dialect 3
This is the newer and supported syntax, introduced in Interbase 6.0.
The SQLAlchemy Firebird dialect detects these versions and
adjusts its representation of SQL accordingly. However,
support for dialect 1 is not well tested and probably has
incompatibilities.
Locking Behavior
----------------
Firebird locks tables aggressively. For this reason, a DROP TABLE may
hang until other transactions are released. SQLAlchemy does its best
to release transactions as quickly as possible. The most common cause
of hanging transactions is a non-fully consumed result set, i.e.::
result = engine.execute(text("select * from table"))
row = result.fetchone()
return
Where above, the ``CursorResult`` has not been fully consumed. The
connection will be returned to the pool and the transactional state
rolled back once the Python garbage collector reclaims the objects
which hold onto the connection, which often occurs asynchronously.
The above use case can be alleviated by calling ``first()`` on the
``CursorResult`` which will fetch the first row and immediately close
all remaining cursor/connection resources.
RETURNING support
-----------------
Firebird 2.0 supports returning a result set from inserts, and 2.1
extends that to deletes and updates. This is generically exposed by
the SQLAlchemy ``returning()`` method, such as::
# INSERT..RETURNING
result = table.insert().returning(table.c.col1, table.c.col2).\
values(name='foo')
print(result.fetchall())
# UPDATE..RETURNING
raises = empl.update().returning(empl.c.id, empl.c.salary).\
where(empl.c.sales>100).\
values(dict(salary=empl.c.salary * 1.1))
print(raises.fetchall())
.. _dialects: https://mc-computing.com/Databases/Firebird/SQL_Dialect.html
"""
import datetime
from sqlalchemy import exc
from sqlalchemy import sql
from sqlalchemy import types as sqltypes
from sqlalchemy import util
from sqlalchemy.engine import default
from sqlalchemy.engine import reflection
from sqlalchemy.sql import compiler
from sqlalchemy.sql import expression
from sqlalchemy.types import BIGINT
from sqlalchemy.types import BLOB
from sqlalchemy.types import DATE
from sqlalchemy.types import FLOAT
from sqlalchemy.types import INTEGER
from sqlalchemy.types import Integer
from sqlalchemy.types import NUMERIC
from sqlalchemy.types import SMALLINT
from sqlalchemy.types import TEXT
from sqlalchemy.types import TIME
from sqlalchemy.types import TIMESTAMP
RESERVED_WORDS = set(
[
"active",
"add",
"admin",
"after",
"all",
"alter",
"and",
"any",
"as",
"asc",
"ascending",
"at",
"auto",
"avg",
"before",
"begin",
"between",
"bigint",
"bit_length",
"blob",
"both",
"by",
"case",
"cast",
"char",
"character",
"character_length",
"char_length",
"check",
"close",
"collate",
"column",
"commit",
"committed",
"computed",
"conditional",
"connect",
"constraint",
"containing",
"count",
"create",
"cross",
"cstring",
"current",
"current_connection",
"current_date",
"current_role",
"current_time",
"current_timestamp",
"current_transaction",
"current_user",
"cursor",
"database",
"date",
"day",
"dec",
"decimal",
"declare",
"default",
"delete",
"desc",
"descending",
"disconnect",
"distinct",
"do",
"domain",
"double",
"drop",
"else",
"end",
"entry_point",
"escape",
"exception",
"execute",
"exists",
"exit",
"external",
"extract",
"fetch",
"file",
"filter",
"float",
"for",
"foreign",
"from",
"full",
"function",
"gdscode",
"generator",
"gen_id",
"global",
"grant",
"group",
"having",
"hour",
"if",
"in",
"inactive",
"index",
"inner",
"input_type",
"insensitive",
"insert",
"int",
"integer",
"into",
"is",
"isolation",
"join",
"key",
"leading",
"left",
"length",
"level",
"like",
"long",
"lower",
"manual",
"max",
"maximum_segment",
"merge",
"min",
"minute",
"module_name",
"month",
"names",
"national",
"natural",
"nchar",
"no",
"not",
"null",
"numeric",
"octet_length",
"of",
"on",
"only",
"open",
"option",
"or",
"order",
"outer",
"output_type",
"overflow",
"page",
"pages",
"page_size",
"parameter",
"password",
"plan",
"position",
"post_event",
"precision",
"primary",
"privileges",
"procedure",
"protected",
"rdb$db_key",
"read",
"real",
"record_version",
"recreate",
"recursive",
"references",
"release",
"reserv",
"reserving",
"retain",
"returning_values",
"returns",
"revoke",
"right",
"rollback",
"rows",
"row_count",
"savepoint",
"schema",
"second",
"segment",
"select",
"sensitive",
"set",
"shadow",
"shared",
"singular",
"size",
"smallint",
"snapshot",
"some",
"sort",
"sqlcode",
"stability",
"start",
"starting",
"starts",
"statistics",
"sub_type",
"sum",
"suspend",
"table",
"then",
"time",
"timestamp",
"to",
"trailing",
"transaction",
"trigger",
"trim",
"uncommitted",
"union",
"unique",
"update",
"upper",
"user",
"using",
"value",
"values",
"varchar",
"variable",
"varying",
"view",
"wait",
"when",
"where",
"while",
"with",
"work",
"write",
"year",
]
)
class _StringType(sqltypes.String):
"""Base for Firebird string types."""
def __init__(self, charset=None, **kw):
self.charset = charset
super(_StringType, self).__init__(**kw)
class VARCHAR(_StringType, sqltypes.VARCHAR):
"""Firebird VARCHAR type"""
__visit_name__ = "VARCHAR"
def __init__(self, length=None, **kwargs):
super(VARCHAR, self).__init__(length=length, **kwargs)
class CHAR(_StringType, sqltypes.CHAR):
"""Firebird CHAR type"""
__visit_name__ = "CHAR"
def __init__(self, length=None, **kwargs):
super(CHAR, self).__init__(length=length, **kwargs)
class _FBDateTime(sqltypes.DateTime):
def bind_processor(self, dialect):
        def process(value):
            # Exact type check on purpose: datetime.datetime subclasses
            # datetime.date, and only plain dates need promoting.
            if type(value) == datetime.date:
                return datetime.datetime(value.year, value.month, value.day)
            else:
                return value
return process
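# A minimal sketch of the processor above: _FBDateTime ignores the dialect
# argument, so None is passed here purely for illustration.
def _example_fb_datetime_processor():
    process = _FBDateTime().bind_processor(None)
    # Plain dates are promoted to midnight datetimes for the driver.
    assert process(datetime.date(2020, 1, 2)) == datetime.datetime(2020, 1, 2)
    # Datetimes (and None) pass through unchanged.
    assert process(None) is None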
colspecs = {sqltypes.DateTime: _FBDateTime}
ischema_names = {
"SHORT": SMALLINT,
"LONG": INTEGER,
"QUAD": FLOAT,
"FLOAT": FLOAT,
"DATE": DATE,
"TIME": TIME,
"TEXT": TEXT,
"INT64": BIGINT,
"DOUBLE": FLOAT,
"TIMESTAMP": TIMESTAMP,
"VARYING": VARCHAR,
"CSTRING": CHAR,
"BLOB": BLOB,
}
# TODO: date conversion types (should be implemented as _FBDateTime,
# _FBDate, etc. as bind/result functionality is required)
class FBTypeCompiler(compiler.GenericTypeCompiler):
def visit_boolean(self, type_, **kw):
return self.visit_SMALLINT(type_, **kw)
def visit_datetime(self, type_, **kw):
return self.visit_TIMESTAMP(type_, **kw)
def visit_TEXT(self, type_, **kw):
return "BLOB SUB_TYPE 1"
def visit_BLOB(self, type_, **kw):
return "BLOB SUB_TYPE 0"
def _extend_string(self, type_, basic):
charset = getattr(type_, "charset", None)
if charset is None:
return basic
else:
return "%s CHARACTER SET %s" % (basic, charset)
def visit_CHAR(self, type_, **kw):
basic = super(FBTypeCompiler, self).visit_CHAR(type_, **kw)
return self._extend_string(type_, basic)
def visit_VARCHAR(self, type_, **kw):
if not type_.length:
raise exc.CompileError(
"VARCHAR requires a length on dialect %s" % self.dialect.name
)
basic = super(FBTypeCompiler, self).visit_VARCHAR(type_, **kw)
return self._extend_string(type_, basic)
class FBCompiler(sql.compiler.SQLCompiler):
"""Firebird specific idiosyncrasies"""
ansi_bind_rules = True
    # def visit_contains_op_binary(self, binary, operator, **kw):
    # can't use CONTAINING b.c. it's case insensitive.
    # def visit_not_contains_op_binary(self, binary, operator, **kw):
    # can't use NOT CONTAINING b.c. it's case insensitive.
def visit_now_func(self, fn, **kw):
return "CURRENT_TIMESTAMP"
def visit_startswith_op_binary(self, binary, operator, **kw):
return "%s STARTING WITH %s" % (
binary.left._compiler_dispatch(self, **kw),
binary.right._compiler_dispatch(self, **kw),
)
def visit_not_startswith_op_binary(self, binary, operator, **kw):
return "%s NOT STARTING WITH %s" % (
binary.left._compiler_dispatch(self, **kw),
binary.right._compiler_dispatch(self, **kw),
)
def visit_mod_binary(self, binary, operator, **kw):
return "mod(%s, %s)" % (
self.process(binary.left, **kw),
self.process(binary.right, **kw),
)
def visit_alias(self, alias, asfrom=False, **kwargs):
if self.dialect._version_two:
return super(FBCompiler, self).visit_alias(
alias, asfrom=asfrom, **kwargs
)
else:
# Override to not use the AS keyword which FB 1.5 does not like
if asfrom:
alias_name = (
isinstance(alias.name, expression._truncated_label)
and self._truncated_identifier("alias", alias.name)
or alias.name
)
return (
self.process(alias.element, asfrom=asfrom, **kwargs)
+ " "
+ self.preparer.format_alias(alias, alias_name)
)
else:
return self.process(alias.element, **kwargs)
def visit_substring_func(self, func, **kw):
s = self.process(func.clauses.clauses[0])
start = self.process(func.clauses.clauses[1])
if len(func.clauses.clauses) > 2:
length = self.process(func.clauses.clauses[2])
return "SUBSTRING(%s FROM %s FOR %s)" % (s, start, length)
else:
return "SUBSTRING(%s FROM %s)" % (s, start)
def visit_length_func(self, function, **kw):
if self.dialect._version_two:
return "char_length" + self.function_argspec(function)
else:
return "strlen" + self.function_argspec(function)
visit_char_length_func = visit_length_func
def function_argspec(self, func, **kw):
# TODO: this probably will need to be
# narrowed to a fixed list, some no-arg functions
# may require parens - see similar example in the oracle
# dialect
if func.clauses is not None and len(func.clauses):
return self.process(func.clause_expr, **kw)
else:
return ""
def default_from(self):
return " FROM rdb$database"
def visit_sequence(self, seq, **kw):
return "gen_id(%s, 1)" % self.preparer.format_sequence(seq)
def get_select_precolumns(self, select, **kw):
"""Called when building a ``SELECT`` statement, position is just
before column list Firebird puts the limit and offset right
after the ``SELECT``...
"""
result = ""
if select._limit_clause is not None:
result += "FIRST %s " % self.process(select._limit_clause, **kw)
if select._offset_clause is not None:
result += "SKIP %s " % self.process(select._offset_clause, **kw)
result += super(FBCompiler, self).get_select_precolumns(select, **kw)
return result
def limit_clause(self, select, **kw):
"""Already taken care of in the `get_select_precolumns` method."""
return ""
def returning_clause(self, stmt, returning_cols):
columns = [
self._label_returning_column(stmt, c)
for c in expression._select_iterables(returning_cols)
]
return "RETURNING " + ", ".join(columns)
class FBDDLCompiler(sql.compiler.DDLCompiler):
"""Firebird syntactic idiosyncrasies"""
def visit_create_sequence(self, create):
"""Generate a ``CREATE GENERATOR`` statement for the sequence."""
# no syntax for these
# https://www.firebirdsql.org/manual/generatorguide-sqlsyntax.html
if create.element.start is not None:
raise NotImplementedError(
"Firebird SEQUENCE doesn't support START WITH"
)
if create.element.increment is not None:
raise NotImplementedError(
"Firebird SEQUENCE doesn't support INCREMENT BY"
)
if self.dialect._version_two:
return "CREATE SEQUENCE %s" % self.preparer.format_sequence(
create.element
)
else:
return "CREATE GENERATOR %s" % self.preparer.format_sequence(
create.element
)
def visit_drop_sequence(self, drop):
"""Generate a ``DROP GENERATOR`` statement for the sequence."""
if self.dialect._version_two:
return "DROP SEQUENCE %s" % self.preparer.format_sequence(
drop.element
)
else:
return "DROP GENERATOR %s" % self.preparer.format_sequence(
drop.element
)
def visit_computed_column(self, generated):
if generated.persisted is not None:
raise exc.CompileError(
"Firebird computed columns do not support a persistence "
"method setting; set the 'persisted' flag to None for "
"Firebird support."
)
return "GENERATED ALWAYS AS (%s)" % self.sql_compiler.process(
generated.sqltext, include_table=False, literal_binds=True
)
class FBIdentifierPreparer(sql.compiler.IdentifierPreparer):
"""Install Firebird specific reserved words."""
reserved_words = RESERVED_WORDS
illegal_initial_characters = compiler.ILLEGAL_INITIAL_CHARACTERS.union(
["_"]
)
def __init__(self, dialect):
super(FBIdentifierPreparer, self).__init__(dialect, omit_schema=True)
class FBExecutionContext(default.DefaultExecutionContext):
def fire_sequence(self, seq, type_):
"""Get the next value from the sequence using ``gen_id()``."""
return self._execute_scalar(
"SELECT gen_id(%s, 1) FROM rdb$database"
% self.identifier_preparer.format_sequence(seq),
type_,
)
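    # e.g. for Sequence('gen_users_id') (hypothetical name) this executes
    # "SELECT gen_id(gen_users_id, 1) FROM rdb$database" and returns the
    # resulting scalar.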
class FBDialect(default.DefaultDialect):
"""Firebird dialect"""
name = "firebird"
supports_statement_cache = True
max_identifier_length = 31
supports_sequences = True
sequences_optional = False
supports_default_values = True
postfetch_lastrowid = False
supports_native_boolean = False
requires_name_normalize = True
supports_empty_insert = False
statement_compiler = FBCompiler
ddl_compiler = FBDDLCompiler
preparer = FBIdentifierPreparer
type_compiler = FBTypeCompiler
execution_ctx_cls = FBExecutionContext
colspecs = colspecs
ischema_names = ischema_names
construct_arguments = []
    # defaults to dialect ver. 3 behavior;
    # turned off automatically on first
    # connect if the server is older
_version_two = True
def __init__(self, *args, **kwargs):
util.warn_deprecated(
"The firebird dialect is deprecated and will be removed "
"in a future version. This dialect is superseded by the external "
"dialect https://github.com/pauldex/sqlalchemy-firebird.",
version="1.4",
)
super(FBDialect, self).__init__(*args, **kwargs)
def initialize(self, connection):
super(FBDialect, self).initialize(connection)
self._version_two = (
"firebird" in self.server_version_info
and self.server_version_info >= (2,)
) or (
"interbase" in self.server_version_info
and self.server_version_info >= (6,)
)
if not self._version_two:
# TODO: whatever other pre < 2.0 stuff goes here
self.ischema_names = ischema_names.copy()
self.ischema_names["TIMESTAMP"] = sqltypes.DATE
self.colspecs = {sqltypes.DateTime: sqltypes.DATE}
self.implicit_returning = self._version_two and self.__dict__.get(
"implicit_returning", True
)
def has_table(self, connection, table_name, schema=None):
"""Return ``True`` if the given table exists, ignoring
the `schema`."""
self._ensure_has_table_connection(connection)
tblqry = """
SELECT 1 AS has_table FROM rdb$database
WHERE EXISTS (SELECT rdb$relation_name
FROM rdb$relations
WHERE rdb$relation_name=?)
"""
c = connection.exec_driver_sql(
tblqry, [self.denormalize_name(table_name)]
)
return c.first() is not None
def has_sequence(self, connection, sequence_name, schema=None):
"""Return ``True`` if the given sequence (generator) exists."""
genqry = """
SELECT 1 AS has_sequence FROM rdb$database
WHERE EXISTS (SELECT rdb$generator_name
FROM rdb$generators
WHERE rdb$generator_name=?)
"""
c = connection.exec_driver_sql(
genqry, [self.denormalize_name(sequence_name)]
)
return c.first() is not None
@reflection.cache
def get_table_names(self, connection, schema=None, **kw):
# there are two queries commonly mentioned for this.
# this one, using view_blr, is at the Firebird FAQ among other places:
# https://www.firebirdfaq.org/faq174/
s = """
select rdb$relation_name
from rdb$relations
where rdb$view_blr is null
and (rdb$system_flag is null or rdb$system_flag = 0);
"""
# the other query is this one. It's not clear if there's really
# any difference between these two. This link:
# https://www.alberton.info/firebird_sql_meta_info.html#.Ur3vXfZGni8
# states them as interchangeable. Some discussion at [ticket:2898]
# SELECT DISTINCT rdb$relation_name
# FROM rdb$relation_fields
# WHERE rdb$system_flag=0 AND rdb$view_context IS NULL
return [
self.normalize_name(row[0])
for row in connection.exec_driver_sql(s)
]
@reflection.cache
def get_view_names(self, connection, schema=None, **kw):
# see https://www.firebirdfaq.org/faq174/
s = """
select rdb$relation_name
from rdb$relations
where rdb$view_blr is not null
and (rdb$system_flag is null or rdb$system_flag = 0);
"""
return [
self.normalize_name(row[0])
for row in connection.exec_driver_sql(s)
]
@reflection.cache
def get_view_definition(self, connection, view_name, schema=None, **kw):
qry = """
SELECT rdb$view_source AS view_source
FROM rdb$relations
WHERE rdb$relation_name=?
"""
rp = connection.exec_driver_sql(
qry, [self.denormalize_name(view_name)]
)
row = rp.first()
if row:
return row["view_source"]
else:
return None
@reflection.cache
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
# Query to extract the PK/FK constrained fields of the given table
keyqry = """
SELECT se.rdb$field_name AS fname
FROM rdb$relation_constraints rc
JOIN rdb$index_segments se ON rc.rdb$index_name=se.rdb$index_name
WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
"""
tablename = self.denormalize_name(table_name)
# get primary key fields
c = connection.exec_driver_sql(keyqry, ["PRIMARY KEY", tablename])
pkfields = [self.normalize_name(r["fname"]) for r in c.fetchall()]
return {"constrained_columns": pkfields, "name": None}
@reflection.cache
def get_column_sequence(
self, connection, table_name, column_name, schema=None, **kw
):
tablename = self.denormalize_name(table_name)
colname = self.denormalize_name(column_name)
        # Heuristic query to determine the generator associated with a PK field
genqry = """
SELECT trigdep.rdb$depended_on_name AS fgenerator
FROM rdb$dependencies tabdep
JOIN rdb$dependencies trigdep
ON tabdep.rdb$dependent_name=trigdep.rdb$dependent_name
AND trigdep.rdb$depended_on_type=14
AND trigdep.rdb$dependent_type=2
JOIN rdb$triggers trig ON
trig.rdb$trigger_name=tabdep.rdb$dependent_name
WHERE tabdep.rdb$depended_on_name=?
AND tabdep.rdb$depended_on_type=0
AND trig.rdb$trigger_type=1
AND tabdep.rdb$field_name=?
AND (SELECT count(*)
FROM rdb$dependencies trigdep2
WHERE trigdep2.rdb$dependent_name = trigdep.rdb$dependent_name) = 2
"""
genr = connection.exec_driver_sql(genqry, [tablename, colname]).first()
if genr is not None:
return dict(name=self.normalize_name(genr["fgenerator"]))
@reflection.cache
def get_columns(self, connection, table_name, schema=None, **kw):
# Query to extract the details of all the fields of the given table
tblqry = """
SELECT r.rdb$field_name AS fname,
r.rdb$null_flag AS null_flag,
t.rdb$type_name AS ftype,
f.rdb$field_sub_type AS stype,
f.rdb$field_length/
COALESCE(cs.rdb$bytes_per_character,1) AS flen,
f.rdb$field_precision AS fprec,
f.rdb$field_scale AS fscale,
COALESCE(r.rdb$default_source,
f.rdb$default_source) AS fdefault
FROM rdb$relation_fields r
JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name
JOIN rdb$types t
ON t.rdb$type=f.rdb$field_type AND
t.rdb$field_name='RDB$FIELD_TYPE'
LEFT JOIN rdb$character_sets cs ON
f.rdb$character_set_id=cs.rdb$character_set_id
WHERE f.rdb$system_flag=0 AND r.rdb$relation_name=?
ORDER BY r.rdb$field_position
"""
# get the PK, used to determine the eventual associated sequence
pk_constraint = self.get_pk_constraint(connection, table_name)
pkey_cols = pk_constraint["constrained_columns"]
tablename = self.denormalize_name(table_name)
# get all of the fields for this table
c = connection.exec_driver_sql(tblqry, [tablename])
cols = []
while True:
row = c.fetchone()
if row is None:
break
name = self.normalize_name(row["fname"])
orig_colname = row["fname"]
# get the data type
colspec = row["ftype"].rstrip()
coltype = self.ischema_names.get(colspec)
if coltype is None:
util.warn(
"Did not recognize type '%s' of column '%s'"
% (colspec, name)
)
coltype = sqltypes.NULLTYPE
elif issubclass(coltype, Integer) and row["fprec"] != 0:
coltype = NUMERIC(
precision=row["fprec"], scale=row["fscale"] * -1
)
elif colspec in ("VARYING", "CSTRING"):
coltype = coltype(row["flen"])
elif colspec == "TEXT":
coltype = TEXT(row["flen"])
elif colspec == "BLOB":
if row["stype"] == 1:
coltype = TEXT()
else:
coltype = BLOB()
else:
coltype = coltype()
# does it have a default value?
defvalue = None
if row["fdefault"] is not None:
                # the value comes down as "DEFAULT 'value'": there may be
                # extra whitespace around the "DEFAULT" keyword, which may
                # also be lower case
# (see also https://tracker.firebirdsql.org/browse/CORE-356)
defexpr = row["fdefault"].lstrip()
assert defexpr[:8].rstrip().upper() == "DEFAULT", (
"Unrecognized default value: %s" % defexpr
)
defvalue = defexpr[8:].strip()
if defvalue == "NULL":
# Redundant
defvalue = None
col_d = {
"name": name,
"type": coltype,
"nullable": not bool(row["null_flag"]),
"default": defvalue,
"autoincrement": "auto",
}
if orig_colname.lower() == orig_colname:
col_d["quote"] = True
# if the PK is a single field, try to see if its linked to
# a sequence thru a trigger
if len(pkey_cols) == 1 and name == pkey_cols[0]:
seq_d = self.get_column_sequence(connection, tablename, name)
if seq_d is not None:
col_d["sequence"] = seq_d
cols.append(col_d)
return cols
@reflection.cache
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
# Query to extract the details of each UK/FK of the given table
fkqry = """
SELECT rc.rdb$constraint_name AS cname,
cse.rdb$field_name AS fname,
ix2.rdb$relation_name AS targetrname,
se.rdb$field_name AS targetfname
FROM rdb$relation_constraints rc
JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name
JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key
JOIN rdb$index_segments cse ON
cse.rdb$index_name=ix1.rdb$index_name
JOIN rdb$index_segments se
ON se.rdb$index_name=ix2.rdb$index_name
AND se.rdb$field_position=cse.rdb$field_position
WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
ORDER BY se.rdb$index_name, se.rdb$field_position
"""
tablename = self.denormalize_name(table_name)
c = connection.exec_driver_sql(fkqry, ["FOREIGN KEY", tablename])
fks = util.defaultdict(
lambda: {
"name": None,
"constrained_columns": [],
"referred_schema": None,
"referred_table": None,
"referred_columns": [],
}
)
for row in c:
cname = self.normalize_name(row["cname"])
fk = fks[cname]
if not fk["name"]:
fk["name"] = cname
fk["referred_table"] = self.normalize_name(row["targetrname"])
fk["constrained_columns"].append(self.normalize_name(row["fname"]))
fk["referred_columns"].append(
self.normalize_name(row["targetfname"])
)
return list(fks.values())
@reflection.cache
def get_indexes(self, connection, table_name, schema=None, **kw):
qry = """
SELECT ix.rdb$index_name AS index_name,
ix.rdb$unique_flag AS unique_flag,
ic.rdb$field_name AS field_name
FROM rdb$indices ix
JOIN rdb$index_segments ic
ON ix.rdb$index_name=ic.rdb$index_name
LEFT OUTER JOIN rdb$relation_constraints
ON rdb$relation_constraints.rdb$index_name =
ic.rdb$index_name
WHERE ix.rdb$relation_name=? AND ix.rdb$foreign_key IS NULL
AND rdb$relation_constraints.rdb$constraint_type IS NULL
ORDER BY index_name, ic.rdb$field_position
"""
c = connection.exec_driver_sql(
qry, [self.denormalize_name(table_name)]
)
indexes = util.defaultdict(dict)
for row in c:
indexrec = indexes[row["index_name"]]
if "name" not in indexrec:
indexrec["name"] = self.normalize_name(row["index_name"])
indexrec["column_names"] = []
indexrec["unique"] = bool(row["unique_flag"])
indexrec["column_names"].append(
self.normalize_name(row["field_name"])
)
return list(indexes.values())
| 31.485859 | 79 | 0.57345 |
9ec49575a529f5eb12fc57d8403b5cd070780ae7 | 6,721 | py | Python | lda2vec/utils.py | annaelvira24/Lda2vec-Tensorflow | 57aa112db5f7fed6542e22f91632cae3e98e46ef | ["MIT"] | null | null | null | lda2vec/utils.py | annaelvira24/Lda2vec-Tensorflow | 57aa112db5f7fed6542e22f91632cae3e98e46ef | ["MIT"] | null | null | null | lda2vec/utils.py | annaelvira24/Lda2vec-Tensorflow | 57aa112db5f7fed6542e22f91632cae3e98e46ef | ["MIT"] | null | null | null |
import numpy as np
import pandas as pd
from sklearn.utils import shuffle
import pickle
import pyLDAvis
import os
import random
def _softmax(x):
e_x = np.exp(x - np.max(x))
out = e_x / e_x.sum()
return out
def _softmax_2d(x):
y = x - x.max(axis=1, keepdims=True)
np.exp(y, out=y)
y /= y.sum(axis=1, keepdims=True)
return y
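# Quick sanity check (values rounded):
#   _softmax(np.array([1.0, 2.0, 3.0])) -> [0.0900, 0.2447, 0.6652]
# _softmax_2d applies the same max-shifted (numerically stable) softmax
# row-wise to a 2-D array.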
def prob_words(context, vocab, temperature=1.0):
""" This calculates a softmax over the vocabulary as a function
of the dot product of context and word.
"""
dot = np.dot(vocab, context)
prob = _softmax(dot / temperature)
return prob
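# Sketch (hypothetical shapes): with vocab of shape [n_words, n_dim] and a
# context vector of shape [n_dim], this yields a length-n_words probability
# vector; temperature > 1 flattens it, temperature < 1 sharpens it.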
def prepare_topics(weights, factors, word_vectors, vocab, temperature=1.0,
doc_lengths=None, term_frequency=None, normalize=False):
""" Collects a dictionary of word, document and topic distributions.
Arguments
---------
weights : float array
This must be an array of unnormalized log-odds of document-to-topic
weights. Shape should be [n_documents, n_topics]
factors : float array
Should be an array of topic vectors. These topic vectors live in the
same space as word vectors and will be used to find the most similar
words to each topic. Shape should be [n_topics, n_dim].
word_vectors : float array
This must be a matrix of word vectors. Should be of shape
[n_words, n_dim]
vocab : list of str
These must be the strings for words corresponding to
indices [0, n_words]
temperature : float
Used to calculate the log probability of a word. Higher
temperatures make more rare words more likely.
doc_lengths : int array
An array indicating the number of words in the nth document.
Must be of shape [n_documents]. Required by pyLDAvis.
term_frequency : int array
An array indicating the overall number of times each token appears
in the corpus. Must be of shape [n_words]. Required by pyLDAvis.
Returns
-------
data : dict
This dictionary is readily consumed by pyLDAVis for topic
visualization.
"""
# Map each factor vector to a word
topic_to_word = []
msg = "Vocabulary size did not match size of word vectors"
assert len(vocab) == word_vectors.shape[0], msg
if normalize:
word_vectors /= np.linalg.norm(word_vectors, axis=1)[:, None]
# factors = factors / np.linalg.norm(factors, axis=1)[:, None]
for factor_vector in factors:
factor_to_word = prob_words(factor_vector, word_vectors,
temperature=temperature)
topic_to_word.append(np.ravel(factor_to_word))
topic_to_word = np.array(topic_to_word)
msg = "Not all rows in topic_to_word sum to 1"
assert np.allclose(np.sum(topic_to_word, axis=1), 1), msg
# Collect document-to-topic distributions, e.g. theta
doc_to_topic = _softmax_2d(weights)
msg = "Not all rows in doc_to_topic sum to 1"
assert np.allclose(np.sum(doc_to_topic, axis=1), 1), msg
data = {'topic_term_dists': topic_to_word,
'doc_topic_dists': doc_to_topic,
'doc_lengths': doc_lengths,
'vocab': vocab,
'term_frequency': term_frequency}
return data
def load_preprocessed_data(data_path, load_embed_matrix=False, shuffle_data=True):
"""Load in all data that was processed via the preprocessing files included
in this library. Optionally load embedding matrix, if you preprocessed and saved one
in the data_path.
Parameters
----------
    data_path : str
        Path where all your data is stored. Should be same path you passed the preprocessor when you called save_data()
    load_embed_matrix : bool, optional
        If True, load embedding_matrix.npy file found in data_path.
    shuffle_data : bool, optional
        If True, it will shuffle the skipgrams dataframe when we load it in. Otherwise, it will leave it in order.
    Returns
    -------
    tuple
        (idx_to_word, word_to_idx, freqs, pivot_ids, target_ids, doc_ids),
        with embed_matrix appended when load_embed_matrix is True.
"""
# Reload all data
with open(data_path + "/" + "idx_to_word.pickle", "rb") as i2w_in:
idx_to_word = pickle.load(i2w_in)
with open(data_path + "/" + "word_to_idx.pickle", "rb") as w2i_in:
word_to_idx = pickle.load(w2i_in)
freqs = np.load(data_path + "/" + "freqs.npy")
freqs = freqs.tolist()
if load_embed_matrix:
embed_matrix = np.load(data_path + "/" + "embedding_matrix.npy")
# df = pd.read_csv(data_path + "/skipgrams.txt", sep="\t", header=None)
df = pd.read_csv(data_path + "/skipgrams.txt", sep="\t", header=None, encoding="utf8")
# Extract data arrays from dataframe
pivot_ids = df[0].values
target_ids = df[1].values
doc_ids = df[2].values
if shuffle_data:
# Shuffle the data
pivot_ids, target_ids, doc_ids = shuffle(pivot_ids, target_ids, doc_ids, random_state=0)
if load_embed_matrix:
return (idx_to_word, word_to_idx, freqs, pivot_ids, target_ids, doc_ids, embed_matrix)
else:
return (idx_to_word, word_to_idx, freqs, pivot_ids, target_ids, doc_ids)
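# Usage sketch (the "data" directory name is hypothetical):
#   idx_to_word, word_to_idx, freqs, pivot_ids, target_ids, doc_ids = \
#       load_preprocessed_data("data")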
def generate_ldavis_data(data_path, model, idx_to_word, freqs, vocab_size):
"""This method will launch a locally hosted session of
pyLDAvis that will visualize the results of our model
Parameters
----------
data_path : str
Location where your data is stored.
model : Lda2Vec
Loaded lda2vec tensorflow model.
idx_to_word : dict
index to word mapping dictionary
freqs list:
Frequencies of each token.
vocab_size : int
Total size of your vocabulary
"""
doc_embed = model.sesh.run(model.mixture.doc_embedding)
topic_embed = model.sesh.run(model.mixture.topic_embedding)
word_embed = model.sesh.run(model.w_embed.embedding)
# Extract all unique words in order of index 0-vocab_size
vocabulary = []
    for word in idx_to_word.values():
        vocabulary.append(word)
# Read in document lengths
doc_lengths = np.load(data_path + "/doc_lengths.npy")
# The prepare_topics function is a direct copy from Chris Moody
vis_data = prepare_topics(doc_embed, topic_embed, word_embed, np.array(vocabulary), doc_lengths=doc_lengths,
term_frequency=freqs, normalize=True)
prepared_vis_data = pyLDAvis.prepare(**vis_data)
pyLDAvis.show(prepared_vis_data)
def chunks(n, *args):
"""Yield successive n-sized chunks from l."""
# From stackoverflow question 312443
keypoints = []
for i in range(0, len(args[0]), n):
keypoints.append((i, i + n))
random.shuffle(keypoints)
for a, b in keypoints:
yield [arg[a: b] for arg in args]
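# Example: chunks(2, [1, 2, 3, 4], ['a', 'b', 'c', 'd']) yields the pairs
# [[1, 2], ['a', 'b']] and [[3, 4], ['c', 'd']] in a random order.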
| 35.941176 | 119 | 0.666567 |
6bd078989a106bd4a40166b0648737e876a49898 | 30,084 | py | Python | flameview.py | orenbenkiki/flameview | 70a04da078381ab60f1d138ff6fb985b8540d80d | ["MIT"] | 13 | 2018-11-03T14:48:41.000Z | 2020-05-18T09:05:12.000Z | flameview.py | orenbenkiki/flameview | 70a04da078381ab60f1d138ff6fb985b8540d80d | ["MIT"] | null | null | null | flameview.py | orenbenkiki/flameview | 70a04da078381ab60f1d138ff6fb985b8540d80d | ["MIT"] | null | null | null |
#!/usr/bin/env python3
"""
Generate a flame graph view.
"""
import re
import sys
from argparse import ArgumentParser
from argparse import Namespace
from argparse import RawDescriptionHelpFormatter
from random import random
from random import seed
from textwrap import dedent
from typing import Any
from typing import Callable
from typing import Dict
from typing import List
from typing import Optional
from typing import TextIO
from typing import Tuple
VERSION = "0.1-b6"
# pylint: disable=too-many-lines
# pylint: disable=missing-docstring
# pylint: disable=too-few-public-methods
# pylint: disable=too-many-instance-attributes
class Node:
_next_index = 0
def __init__(self, name: str, size: Optional[float] = None, tooltip_html: str = '') -> None:
self.index = Node._next_index
self.size = size
self.tooltip_html = tooltip_html
Node._next_index += 1
self.total_size = 0.0
self.name = name
self.label = name
self.klass = 'sum'
self.column = 0
self.columns_span = 0
self.group: Optional[str] = None
self.nodes: Dict[str, 'Node'] = {}
def _main() -> None: # pylint: disable=too-many-locals
parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter,
description='Generate a flamegraph view.', epilog=dedent("""
INPUT: A flamegraph file. Each line must be in the format:
name;...;name size [difference] [#tooltip_html]
OUTPUT: An HTML file visualizing the flame graph.
"""))
parser.add_argument('--minpercent', metavar='PERCENT', default='0.1', type=float,
help='The minimal percent of the entries to display; '
'default: 0.1 (1/1000 of the total)')
parser.add_argument('--sortby', metavar='SORT_KEY',
default='name', choices=['name', 'size', 'input'],
help='How to sort nodes:\n'
'name (default) - lexicographically, '
'size - by the size data, '
'input - by input order')
parser.add_argument('--inverted', action='store_true',
help='If specified, generate an inverted (icicles) graph.')
parser.add_argument('--title', metavar='TITLE',
help='An optional title for the HTML document; '
'default: "Flame Graph" or "Icicle Graph"')
parser.add_argument('--sizename', metavar='NAME', default="samples",
help='The name of the size data; default: "samples".')
parser.add_argument('--nodefaultcss', action='store_true',
help='If specified, the default appearance CSS is omitted, '
'probably to avoid interfering with --addcss')
parser.add_argument('--addcss', metavar='CSS', action='append',
help='The name of a CSS file to embed into the output HTML')
parser.add_argument('--colors', metavar='PALETTE', default='hot',
choices=['hot', 'mem', 'io', 'red', 'green', 'blue',
'aqua', 'yellow', 'purple', 'orange'],
help='The color palette to use, subset of flamegraph.pl; '
'default: "hot", other choices: '
'mem, io, red, green, blue, aqua, yellow, purple, orange')
parser.add_argument('--seed', metavar='SEED', default=None, type=int,
help='An optional seed for repeatable random color generation; '
'default: None')
parser.add_argument('--strict', action='store_true',
help='If specified, abort with an error on invalid input lines.')
parser.add_argument('--output', metavar='HTML',
help='The HTML file to write; default: "-", write to standard output')
parser.add_argument('--version', action='store_true',
help='Print the version information (%s) and exit' % VERSION)
parser.add_argument('input', metavar='FLAMEGRAPH', nargs='?',
help='The flamegraph data file to read; '
'default: "-", read from standard input')
args = parser.parse_args()
if args.version:
print('flameview.py: version %s' % VERSION)
sys.exit(0)
seed(args.seed)
root = _load_input_data(args.input, args.strict)
_add_self_nodes(root)
_compute_sizes(root)
_prune_small_nodes(root, args.minpercent)
sizes = _size_tree_names(root)
groups = _compute_tree_groups(root, sizes)
sort_key = {'name': _by_name, 'size': _by_size, 'input': _by_input}[args.sortby]
column_sizes = _compute_tree_column_sizes(root, sort_key)
rows = _compute_tree_rows(root)
_print_output_data(args, groups, column_sizes, rows)
def _load_input_data(path: Optional[str], is_strict: bool) -> Node:
if path is None or path == '-':
return _load_data_file('stdin', is_strict, sys.stdin)
with open(path, 'r') as file:
return _load_data_file(path, is_strict, file)
def _load_data_file(path: str, is_strict: bool, file: TextIO) -> Node:
line_regexp = re.compile(r'''
\A
(.*?)
(?:
\s+
([+]?\d*\.?\d+(?:[eE][-+]?\d+)?)
(?:
\s+
([+-]?\d*\.?\d+(?:[eE][-+]?\d+)?)
)?
)?
(?:
\s+
[#]
\s*
(.*?)
)?
\s*
\Z
''', re.X)
root = Node('all')
ignored = 0
for line_number, line_text in enumerate(file.readlines()):
line_number += 1
match = line_regexp.fullmatch(line_text)
if not match:
if is_strict:
sys.stderr.write('flameview.py: %s:%s: error: invalid line\n' % (path, line_number))
ignored += 1
continue
names_text, size_text, _difference_text, tooltip_text = match.group(1, 2, 3, 4)
size = None if size_text is None else float(size_text)
_add_node(names_text.split(';'), root, size, tooltip_text or '')
if ignored > 0:
if is_strict:
sys.exit(1)
sys.stderr.write('flameview.py: %s: warning: ignored %s invalid lines\n' % (path, ignored))
return root
def _add_node(names: List[str], parent: Node, size: Optional[float], tooltip_html: str) -> None:
name = names[0]
name_node = parent.nodes.get(name)
if name_node is None:
name_node = parent.nodes[name] = Node(name)
if len(names) > 1:
_add_node(names[1:], name_node, size, tooltip_html)
return
if size is not None:
if name_node.size is None:
name_node.size = size
else:
name_node.size += size
name_node.tooltip_html = tooltip_html
SELF_NAME = "(self)"
def _add_self_nodes(parent: Node) -> None:
for node in parent.nodes.values():
if len(node.name) == 1:
node.name = parent.name + ';' + node.name
_add_self_nodes(node)
if not node.nodes:
node.klass = 'leaf'
continue
assert node.klass == 'sum'
if node.size is None:
continue
self_node = Node('%s;%s' % (node.name, SELF_NAME), node.size, node.tooltip_html)
self_node.label = SELF_NAME
self_node.klass = 'self'
node.nodes[SELF_NAME] = self_node
node.size = None
def _compute_sizes(parent: Node) -> None:
parent.total_size = 0.0
if parent.size is not None:
parent.total_size += parent.size
for node in parent.nodes.values():
_compute_sizes(node)
parent.total_size += node.total_size
def _prune_small_nodes(root: Node, min_percent: float) -> None:
if min_percent <= 0 or min_percent >= 100:
return
min_size = root.total_size * min_percent / 100.0
_prune_small_tree(root, min_size)
def _prune_small_tree(parent: Node, min_size: float) -> None:
assert parent.total_size >= min_size
large_nodes: Dict[str, Node] = {}
total_small_nodes_size = 0.0
for name, node in parent.nodes.items():
if node.total_size < min_size:
total_small_nodes_size += node.total_size
else:
large_nodes[name] = node
_prune_small_tree(node, min_size)
if len(large_nodes) == len(parent.nodes):
return
small_node = Node('(small)', total_small_nodes_size)
small_node.total_size = total_small_nodes_size
parent.nodes = large_nodes
parent.nodes['...'] = small_node
def _size_tree_names(root: Node) -> Dict[str, int]:
sizes: Dict[str, int] = {}
_size_names(root, sizes)
return sizes
def _size_names(parent: Node, sizes: Dict[str, int]):
for node in parent.nodes.values():
sizes[node.name] = sizes.get(node.name, 0) + 1
_size_names(node, sizes)
def _compute_tree_groups(root: Node, sizes: Dict[str, int]) -> Dict[str, List[int]]:
groups: Dict[str, List[int]] = {}
_compute_groups(root, sizes, groups)
return groups
def _compute_groups(parent: Node, sizes: Dict[str, int], groups: Dict[str, List[int]]) -> None:
for node in parent.nodes.values():
_compute_groups(node, sizes, groups)
if sizes[node.name] == 1:
continue
node.group = node.name
group = groups.get(node.name, None)
if group is None:
group = groups[node.name] = []
group.append(node.index)
def _by_name(node: Node) -> str:
return node.name
def _by_size(node: Node) -> float:
return -node.total_size
def _by_input(node: Node) -> int:
return node.index
def _compute_tree_column_sizes(parent: Node, sort_key: Callable[[Node], Any]) -> List[float]:
column_sizes: List[float] = []
_compute_column_sizes(parent, sort_key, column_sizes)
return column_sizes
def _compute_column_sizes(parent: Node, sort_key: Callable[[Node], Any],
column_sizes: List[float]) -> None:
parent.column = len(column_sizes)
if parent.nodes:
assert parent.size is None
for node in sorted(parent.nodes.values(), key=sort_key):
_compute_column_sizes(node, sort_key, column_sizes)
parent.columns_span = node.column + node.columns_span - parent.column
else:
assert parent.size is not None
column_sizes.append(parent.size)
parent.columns_span = 1
def _compute_tree_rows(root: Node) -> List[List[Node]]:
rows: List[List[Node]] = []
_collect_unsorted_rows(root, rows, 0)
_sort_rows(rows)
return rows
def _collect_unsorted_rows(parent: Node, rows: List[List[Node]], level: int) -> None:
if len(rows) == level:
rows.append([])
row = rows[level]
row.append(parent)
for node in parent.nodes.values():
_collect_unsorted_rows(node, rows, level + 1)
def _sort_rows(rows: List[List[Node]]) -> None:
for row in rows:
row.sort(key=_by_column)
def _by_column(node: Node) -> int:
return node.column
BEFORE_TITLE = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8"/>
"""[1:]
BEFORE_CSS = """
<style>
/*** Layout: ***/
* {
box-sizing: border-box;
}
#width, #graph, #row, #height {
margin: 0;
padding: 0;
}
.row {
position: relative;
}
.leaf,
.self,
.sum {
position: absolute;
}
.label {
min-width: 0;
overflow: hidden;
}
.tooltip {
visibility: hidden;
position: absolute;
z-index: 1;
}
"""
DEFAULT_APPEARANCE_CSS = """
/*** Default Appearance: ***/
* {
font-family: sans-serif;
}
.leaf,
.self,
.sum {
border-width: 1px;
border-style: solid;
border-radius: 4px;
border-color: black;
text-align: center;
}
.group_hover .label {
background-color: ivory !important;
}
.name {
font-style: italic;
}
.selected .label {
font-weight: bold;
}
.selected .name {
font-weight: bold;
}
.tooltip {
border-style: solid;
border-width: 2px;
border-radius: 4px;
border-color: black;
background-color: ivory;
padding: 0.25em 0.5em 0.25em 0.5em;
text-align: left;
}
.tooltipped .leaf:hover .tooltip,
.tooltipped .self:hover .tooltip,
.tooltipped .sum:hover .tooltip {
visibility: visible;
}
"""
BEFORE_JAVASCRIPT = """
</style>
<script>
/*jslint browser: true*/
/*** Generated Data: ***/
"""[1:]
BEFORE_HTML = """
/*** Behavior: ***/
// The total size of everything (for computing percentages).
// Computed on load.
var total_size = null;
// The list of currently selected cell ids.
var selected_cell_ids = [];
// The id of the root cell that covers everything (by convention).
var root_id = "N0";
// Compute which columns are visible given the current selection.
function compute_visible_columns_mask() {
"use strict";
if (selected_cell_ids.length === 1) {
var selected_cell_id = selected_cell_ids[0];
return cells_data[selected_cell_id].columns_mask;
}
var lowest_cell_id = undefined;
var lowest_level = undefined;
selected_cell_ids.forEach(function (cell_id) {
var cell_level = cells_data[cell_id].level;
if (lowest_cell_id === undefined || lowest_level > cell_level) {
lowest_cell_id = cell_id;
lowest_level = cell_level;
}
});
var visible_columns_mask = cells_data[lowest_cell_id].columns_mask.slice();
selected_cell_ids.forEach(function (cell_id) {
var group_id = cells_data[cell_id].group_id;
var columns_mask = (
group_id
? groups_data[group_id].columns_mask
: cells_data[cell_id].columns_mask
);
columns_mask.forEach(function (is_column_in_group, column_index) {
visible_columns_mask[column_index] *= is_column_in_group;
});
});
return visible_columns_mask;
}
// Compute the total size of the visible columns.
function compute_visible_size(visible_columns_mask) {
"use strict";
var visible_size = 0;
column_sizes.forEach(function (column_size, column_index) {
visible_size += column_size * visible_columns_mask[column_index];
});
return visible_size;
}
// Convert a number to a human-friendly precision.
//
// Allow no more than 2 decimal digits after the `.`, unless the number is very
// small, in which case allow as many as needed so that the first two
// significant digits are visible.
function stringify(number) {
"use strict";
if (number === 0) {
return number;
}
if (number < 0) {
return "-" + stringify(-number);
}
var precision = 1;
var result = 0;
while (result === 0) {
precision *= 10;
result = Math.round(number * precision) / precision;
}
precision *= 10;
return Math.round(number * precision) / precision;
}
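// For example: stringify(0.004567) returns 0.0046 (the first two significant
// digits survive), while stringify(1234.5678) returns 1234.57.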
// Update the visibility and width of a specific cell.
function update_cell(visible_columns_mask,
scale_factor, visible_size, cell_id) {
"use strict";
var cell_data = cells_data[cell_id];
var cell = document.getElementById(cell_id);
var cell_offset_is_done = false;
var cell_offset = 0;
var cell_size = 0;
cell_data.columns_mask.forEach(function (is_column_used, column_index) {
if (visible_columns_mask[column_index] > 0) {
if (is_column_used > 0) {
cell_offset_is_done = true;
cell_size += column_sizes[column_index];
} else if (!cell_offset_is_done) {
cell_offset += column_sizes[column_index];
}
}
});
if (!cell_offset_is_done) {
cell.style.display = "none";
return;
}
cell.style.display = null;
var left = Math.round(cell_offset * scale_factor);
cell.style.left = left + "px";
var width = Math.round((cell_offset + cell_size) * scale_factor) - left;
cell.style.width = width + "px";
var computed = cell.querySelector(".computed");
if (!computed) {
return;
}
var computed_text = stringify(cell_size);
if (cell_size === total_size) {
computed.innerText = computed_text;
return;
}
if (visible_size !== total_size && cell_size !== visible_size) {
var percentage_of_visible = 100 * cell_size / visible_size;
computed_text += "<br/>" + stringify(percentage_of_visible) +
"% out of: " + stringify(visible_size) + " visible";
}
var percentage_of_total = 100 * cell_size / total_size;
computed_text += "<br/>" + stringify(percentage_of_total) +
"% out of: " + stringify(total_size) + " total";
computed.innerHTML = computed_text;
}
// Update all the cells visibility and width.
//
// Must be done every time the selected cell and/or the display width change.
function update_cells() {
"use strict";
var visible_columns_mask = compute_visible_columns_mask();
var visible_size = compute_visible_size(visible_columns_mask);
var graph_width = document.getElementById("width").clientWidth;
var graph = document.getElementById("graph");
graph.style.width = graph_width + "px";
var scale_factor = (graph_width - 2) / visible_size;
Object.keys(cells_data).forEach(function (cell_id) {
update_cell(visible_columns_mask, scale_factor, visible_size, cell_id);
});
}
// Cell hover highlights all cells in a group.
// The cell itself is highlighted using the :hover CSS selector.
// The other cells in the group are highlighted using the group_hover class.
// Highlight all group cells on entry.
function on_over(event) {
"use strict";
var cell = event.currentTarget;
var group_id = cells_data[cell.id].group_id;
if (group_id) {
groups_data[group_id].cell_ids.forEach(function (group_cell_id) {
var group_cell = document.getElementById(group_cell_id);
group_cell.classList.add("group_hover");
});
} else {
cell.classList.add("group_hover");
}
}
// Unhighlight all group cells on exit.
function on_out(event) {
"use strict";
var cell = event.currentTarget;
var group_id = cells_data[cell.id].group_id;
if (group_id) {
groups_data[group_id].cell_ids.forEach(function (group_cell_id) {
var group_cell = document.getElementById(group_cell_id);
group_cell.classList.remove("group_hover");
});
} else {
cell.classList.remove("group_hover");
}
}
// Select a cell for filtering the visible graph content.
//
// A simple click just shows the selected cell columns,
// a control-click adds/removes selected cells,
// an alt-click toggles tooltips.
//
// When multiple cells are selected, the lowest-level one restricts the set of
// columns, and each additional higher-level cell further restricts the columns
// to these covered by the group the cell belongs to.
function on_click(event) {
"use strict";
var cell = event.currentTarget;
if (event.altKey) {
document.getElementById("graph").classList.add("tooltipped");
return;
}
if (!event.ctrlKey) {
selected_cell_ids.forEach(function (cell_id) {
document.getElementById(cell_id).classList.remove("selected");
});
selected_cell_ids = [cell.id];
cell.classList.add("selected");
update_cells();
return;
}
var new_selected_cell_ids = [];
selected_cell_ids.forEach(function (cell_id) {
if (cell_id !== cell.id) {
new_selected_cell_ids.push(cell_id);
}
});
if (new_selected_cell_ids.length === selected_cell_ids.length) {
selected_cell_ids.push(cell.id);
cell.classList.add("selected");
update_cells();
return;
}
cell.classList.remove("selected");
selected_cell_ids = new_selected_cell_ids;
if (new_selected_cell_ids.length === 0) {
selected_cell_ids = [root_id];
document.getElementById(root_id).classList.add("selected");
}
update_cells();
}
// Disable tooltips.
function disable_tooltip(event) {
"use strict";
if (event.altKey) {
document.getElementById("graph").classList.remove("tooltipped");
event.stopPropagation();
}
}
// Attach handlers to table cells.
function register_handlers() {
"use strict";
Object.keys(cells_data).forEach(function (cell_id) {
var cell = document.getElementById(cell_id);
cell.onclick = on_click;
cell.onmouseover = on_over;
cell.onmouseout = on_out;
var tooltip = cell.querySelector(".tooltip");
if (tooltip) {
tooltip.onclick = disable_tooltip;
}
});
}
function compute_groups_columns_masks() {
"use strict";
Object.keys(groups_data).forEach(function (group_id) {
var group_data = groups_data[group_id];
group_data.cell_ids.forEach(function (cell_id) {
var cell_data = cells_data[cell_id];
if (!group_data.columns_mask) {
group_data.columns_mask = cell_data.columns_mask.slice();
} else {
var columns_mask = cell_data.columns_mask;
columns_mask.forEach(function (is_column_used, column_index) {
if (is_column_used > 0) {
group_data.columns_mask[column_index] = 1;
}
});
}
});
});
}
function on_load() {
"use strict";
register_handlers();
total_size = compute_visible_size(cells_data[root_id].columns_mask);
compute_groups_columns_masks();
on_click({
"currentTarget": document.getElementById(root_id),
"ctrlKey": false
});
}
// On resize, update all the cell widths.
window.onresize = update_cells;
// Properly initialize everything on load.
window.onload = on_load;
</script>
</head>
<body>
"""[1:]
AFTER_HTML = """
<div id="width"></div>
</body>
</html>
"""[1:]
def _print_output_data(args: Namespace, groups: Dict[str, List[int]],
column_sizes: List[float], rows: List[List[Node]]) -> None:
if args.output is None or args.output == '-':
_print_output_file(sys.stdout, args, groups, column_sizes, rows)
else:
with open(args.output, 'w') as file:
_print_output_file(file, args, groups, column_sizes, rows)
def _print_output_file(file: TextIO, args: Namespace, groups: Dict[str, List[int]],
column_sizes: List[float], rows: List[List[Node]]) -> None:
file.write(BEFORE_TITLE)
title = args.title
if title is None:
if args.inverted:
title = "Icicle Graph"
else:
title = "Flame Graph"
_print_title(file, title)
file.write(BEFORE_CSS)
if not args.nodefaultcss:
file.write(DEFAULT_APPEARANCE_CSS)
for css_path in args.addcss or []:
try:
with open(css_path, 'r') as css_file:
file.write(css_file.read())
except FileNotFoundError:
sys.stderr.write('flameview.py: No such file or directory: %s\n' % css_path)
sys.exit(1)
file.write(BEFORE_JAVASCRIPT)
_print_groups_data(file, groups)
_print_cells_data(file, rows, len(column_sizes))
_print_column_sizes(file, column_sizes)
file.write(BEFORE_HTML)
_print_h1(file, title)
if args.inverted:
_print_table(file, args.sizename, args.colors, rows)
else:
_print_table(file, args.sizename, args.colors, list(reversed(rows)))
file.write(AFTER_HTML)
def _print_title(file: TextIO, title: str) -> None:
file.write('<title>%s</title>' % title)
def _print_groups_data(file: TextIO, groups: Dict[str, List[int]]) -> None:
file.write(dedent("""
// Data for each cells group:
// cell_ids: The ids of the group cells.
// On load, the following is computed for each group:
// columns_mask: A 0/1 mask of all the columns used by the group cells.
var groups_data = {
"""))
group_lines = [' "%s": {"cell_ids": ["%s"]}'
% (group_name, '", "'.join(['N' + str(id) for id in sorted(cell_ids)]))
for group_name, cell_ids in sorted(groups.items())]
file.write(',\n'.join(group_lines))
file.write('\n};\n\n')
def _print_cells_data(file: TextIO, rows: List[List[Node]], columns_count: int) -> None:
file.write(dedent("""
// Data for each cell:
// level: The stack nesting level.
// columns_mask: A 0/1 mask of all the columns used by the cell.
// group_id: The group the cell belongs to, if any.
var cells_data = {
""")[1:-1])
is_first = True
for level, row in enumerate(rows):
for node in row:
if not is_first:
file.write(',')
file.write('\n ')
_print_cell_data(file, node, columns_count, level)
is_first = False
file.write('\n};\n')
def _print_cell_data(file: TextIO, node: Node, columns_count: int, level: int) -> None:
file.write('"N%s": {\n' % node.index)
file.write(' "level": %s' % level)
file.write(',\n "columns_mask": [%s]'
% _columns_mask(node.column, node.columns_span, columns_count))
if node.group:
file.write(',\n "group_id": "%s"' % node.group)
file.write('\n }')
def _columns_mask(column: int, columns_span: int, columns_count: int) -> str:
prefix = ["0"] * column
middle = ["1"] * columns_span
suffix = ["0"] * (columns_count - column - columns_span)
return ', '.join(prefix + middle + suffix)
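# e.g. _columns_mask(column=1, columns_span=2, columns_count=5)
# returns "0, 1, 1, 0, 0".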
def _print_column_sizes(file, column_sizes: List[float]) -> None:
file.write(dedent("""
// The size of each leaf/self cell (that is, a column).
var column_sizes = [%s];
""") % ', '.join([str(size) for size in column_sizes]))
file.write('\n')
def _print_h1(file: TextIO, title: str) -> None:
file.write('<h1 id="title">%s</h1>\n' % title)
def _print_table(file: TextIO, sizename: str, palette: str, rows: List[List[Node]]) -> None:
file.write('<div id="graph" class="tooltipped">\n')
for row in rows:
_print_row(file, sizename, palette, row)
file.write('</div>\n')
def _print_row(file: TextIO, sizename: str, palette: str, row: List[Node]) -> None:
file.write('<div class="row">\n')
for node in row:
_print_node(file, sizename, palette, node)
file.write('<div class="height"> </div>\n')
file.write('</div>\n')
def _print_node(file: TextIO, sizename: str, palette: str, node: Node) -> None:
file.write('<div id="N%s" class="%s"' % (node.index, node.klass))
file.write(' style="background-color: %s">\n' % _node_color(node, palette))
_print_tooltip(file, sizename, node)
_print_label(file, node)
file.write('</div>\n')
def _node_color(node: Node, palette: str) -> str:
if len(node.label) == 1:
red, green, blue = 160.0, 160.0, 160.0
else:
red, green, blue = {
'hot': _hot_color,
'mem': _mem_color,
'io': _io_color,
'red': _red_color,
'green': _green_color,
'blue': _blue_color,
'aqua': _aqua_color,
'yellow': _yellow_color,
'purple': _purple_color,
'orange': _orange_color,
}[palette]()
return 'rgb(%d, %d, %d)' % (red, green, blue)
# Palettes were copied from flamegraph.pl:
def _hot_color() -> Tuple[float, float, float]:
red = 205 + 50 * random()
green = 230 * random()
blue = 55 * random()
return red, green, blue
def _mem_color() -> Tuple[float, float, float]:
red = 0.0
green = 190 + 50 * random()
blue = 210 * random()
return red, green, blue
def _io_color() -> Tuple[float, float, float]:
red = 80 + 60 * random()
green = red
blue = 190 + 55 * random()
return red, green, blue
def _red_color() -> Tuple[float, float, float]:
fraction = random()
red = 200 + 55 * fraction
green = 50 + 80 * fraction
blue = green
return red, green, blue
def _green_color() -> Tuple[float, float, float]:
fraction = random()
red = 50 + 60 * fraction
green = 200 + 55 * fraction
blue = red
return red, green, blue
def _blue_color() -> Tuple[float, float, float]:
fraction = random()
red = 80 + 60 * fraction
green = red
blue = 205 + 50 * fraction
return red, green, blue
def _yellow_color() -> Tuple[float, float, float]:
fraction = random()
red = 175 + 55 * fraction
green = red
blue = 50 + 20 * fraction
return red, green, blue
def _purple_color() -> Tuple[float, float, float]:
fraction = random()
red = 190 + 65 * fraction
green = 80 + 60 * fraction
blue = red
return red, green, blue
def _aqua_color() -> Tuple[float, float, float]:
fraction = random()
red = 50 + 60 * fraction
green = 165 + 55 * fraction
blue = green
return red, green, blue
def _orange_color() -> Tuple[float, float, float]:
fraction = random()
red = 190 + 65 * fraction
green = 90 + 65 * fraction
blue = 0.0
return red, green, blue
def _print_tooltip(file: TextIO, sizename: str, node: Node) -> None:
file.write('<div class="tooltip">\n')
file.write('<span class="name">%s</span><br/>\n' % _escape(node.name))
file.write('<hr/>\n')
file.write('<div class="basic">%s: <span class="computed"></span></div>\n' % sizename)
if node.tooltip_html:
file.write('<div class="extra">\n')
file.write(node.tooltip_html)
file.write('</div>\n')
file.write('</div>\n')
def _print_label(file: TextIO, node: Node) -> None:
file.write('<div class="label">%s</div>\n' % _escape(node.label))
def _escape(text: str) -> str:
return text.replace('&', '&').replace('<', '<').replace('>', '>')
if __name__ == '__main__':
_main()
| 29.350244 | 100 | 0.610989 |
bf240d4b1e3780d0c71a0499a018a4efb2596e2c | 6,553 | py | Python | Widen/LC123_Best_Time_to_Buy_and_Sell_Stock_III.py | crazywiden/Leetcode_daily_submit | 15637e260ab547022ac0c828dd196337bd8d50a3 | ["MIT"] | null | null | null | Widen/LC123_Best_Time_to_Buy_and_Sell_Stock_III.py | crazywiden/Leetcode_daily_submit | 15637e260ab547022ac0c828dd196337bd8d50a3 | ["MIT"] | null | null | null | Widen/LC123_Best_Time_to_Buy_and_Sell_Stock_III.py | crazywiden/Leetcode_daily_submit | 15637e260ab547022ac0c828dd196337bd8d50a3 | ["MIT"] | null | null | null |
"""
LC123. Best Time to Buy and Sell Stock III
Say you have an array for which the ith element is the price of a given stock on day i.
Design an algorithm to find the maximum profit. You may complete at most two transactions.
Note: You may not engage in multiple transactions at the same time (i.e., you must sell the stock before you buy again).
"""
# brutal force -- TLE
class Solution:
def maxProfit(self, prices) -> int:
if len(prices) == 0:
return 0
res = 0
for i in range(1, len(prices)+1):
left_profit = self.cal_profit(prices[:i])
right_profit = self.cal_profit(prices[i:])
# print(left_profit, right_profit, i, prices[i])
res = max(res, left_profit + right_profit)
return res
def cal_profit(self, arr):
if len(arr) == 0:
return 0
res = 0
curr_min, curr_max = arr[0], arr[0]
for i in range(1, len(arr)):
if arr[i] <= curr_min:
res = max(res, curr_max - curr_min)
curr_max = arr[i]
curr_min = arr[i]
else:
curr_max = max(arr[i], curr_max)
res = max(curr_max-curr_min, res)
return res
# two-array dp
# finally!!
# Runtime: 116 ms, faster than 12.79% of Python3 online submissions for Best Time to Buy and Sell Stock III.
# Memory Usage: 18.4 MB, less than 36.36% of Python3 online submissions for Best Time to Buy and Sell Stock III.
class Solution:
def maxProfit(self, prices) -> int:
if len(prices) == 0:
return 0
k = 2
N = len(prices)
res = 0
# define state
global_profit = [[0 for _ in range(k)] for _ in range(N)]
local_profit = [[0 for _ in range(k)] for _ in range(N)]
# update
for i in range(1, N):
for j in range(k):
if j == 0:
local_profit[i][j] = max(local_profit[i-1][j]-prices[i-1]+prices[i], 0)
else:
local_profit[i][j] = max(global_profit[i-1][j-1], local_profit[i-1][j])-prices[i-1]+prices[i]
global_profit[i][j] = max(global_profit[i-1][j], local_profit[i][j])
return global_profit[-1][-1]
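# Worked example: prices = [3, 3, 5, 0, 0, 3, 1, 4] -> 6
# (buy at 0, sell at 3; then buy at 1, sell at 4: 3 + 3).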
# find min-max interval solution
# Runtime: 72 ms, faster than 98.49% of Python3 online submissions for Best Time to Buy and Sell Stock III.
# Memory Usage: 13.9 MB, less than 72.73% of Python3 online submissions for Best Time to Buy and Sell Stock III.
class Solution:
    def maxProfit(self, prices: List[int]) -> int:
        # Find profitable min-max intervals:
        #   no interval -> return 0
        #   two or more -> combine the best interval with the best remaining
        #                  one, or split the best interval at its largest dip
        #   exactly one -> split it
        if not prices:
            return 0
        prices.append(-float('inf'))
        cur_min, cur_max = 0, 0
        g_min, g_max = 0, 0
        for i in range(1, len(prices)):
            if prices[i] <= prices[cur_min]:
                if prices[cur_max] - prices[cur_min] > prices[g_max] - prices[g_min]:
                    g_max, g_min = cur_max, cur_min
                cur_max, cur_min = i, i
            elif prices[i] >= prices[cur_max]:
                cur_max = i
        # print(g_max, g_min)
        if prices[g_max] == prices[g_min]:
            return 0
        cur_min, cur_max = 0, 0
        g1_min, g1_max = 0, 0
        for i in range(1, g_min + 1):
            if prices[i] <= prices[cur_min]:
                if prices[cur_max] - prices[cur_min] > prices[g1_max] - prices[g1_min]:
                    g1_max, g1_min = cur_max, cur_min
                cur_max, cur_min = i, i
            elif prices[i] >= prices[cur_max]:
                cur_max = i
        # print(g1_max, g1_min)
        cur_min, cur_max = g_max + 1, g_max + 1
        g2_min, g2_max = g_max + 1, g_max + 1
        for i in range(g_max + 1, len(prices)):
            if prices[i] <= prices[cur_min]:
                if prices[cur_max] - prices[cur_min] > prices[g2_max] - prices[g2_min]:
                    g2_max, g2_min = cur_max, cur_min
                cur_max, cur_min = i, i
            elif prices[i] >= prices[cur_max]:
                cur_max = i
        # print(g2_max, g2_min)
        gg_max, gg_min = g1_max, g1_min
        if prices[g2_max] - prices[g2_min] > prices[g1_max] - prices[g1_min]:
            gg_max, gg_min = g2_max, g2_min
        # print(gg_max, gg_min)
        d_min, d_max = g_min, g_min
        cur_min, cur_max = g_min, g_min
        for i in range(g_min + 1, g_max + 1):
            if prices[i] <= prices[i - 1]:
                cur_min = i
            else:
                if prices[cur_min] - prices[cur_max] < prices[d_min] - prices[d_max]:
                    d_min, d_max = cur_min, cur_max
                cur_min, cur_max = i, i
        # print(d_max, d_min)
        return max(prices[g_max] - prices[g_min] + prices[gg_max] - prices[gg_min],
                   prices[d_max] - prices[g_min] + prices[g_max] - prices[d_min])
# a more elegant solution
class Solution:
def maxProfit(self, prices: List[int]) -> int:
firstMin = secondMin = 999999999
firstProfit = secondProfit = 0
for price in prices:
if price < firstMin:
firstMin = price
if price - firstMin > firstProfit:
firstProfit = price - firstMin
if price - firstProfit < secondMin:
secondMin = price - firstProfit
if price - secondMin >= secondProfit:
secondProfit = price - secondMin
return secondProfit
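# Intuition for the solution above: secondMin tracks the effective cost of
# the second buy (price paid minus the profit already banked by the best
# first transaction), so secondProfit ends up as the best total profit of
# at most two transactions.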
# another dp
# Runtime: 88 ms, faster than 68.34% of Python3 online submissions for Best Time to Buy and Sell Stock III.
# Memory Usage: 13.9 MB, less than 72.73% of Python3 online submissions for Best Time to Buy and Sell Stock III.
class Solution:
def maxProfit(self, prices: List[int]) -> int:
if not prices:
return 0
l = len(prices)
k = 2
dp = [[0] * l for _ in range(k+1)]
for i in range(1, k+1):
# money spent at first day
prev = dp[i - 1][0] - prices[0]
for j in range(1, l):
# money spent if sell stock today
deal = prev + prices[j]
# compare money spent if don't sell stock today with sell stock today
dp[i][j] = max(dp[i][j - 1], deal)
# compare i - 1 deals during j days, and don't buy stock today
# with i - 1 deals during j days, and buy stock today
prev = max(prev, dp[i - 1][j] - prices[j])
return dp[-1][-1]
| 37.878613
| 134
| 0.551198
|
c2dd068f5530e690bc2d90249c0be7009f86c8a9
| 5,211
|
py
|
Python
|
utils/swift_build_support/swift_build_support/targets.py
|
adrian-prantl/swift
|
4cee546841b6f5cf87b3df60efdeaf6bc7b59036
|
[
"Apache-2.0"
] | null | null | null |
utils/swift_build_support/swift_build_support/targets.py
|
adrian-prantl/swift
|
4cee546841b6f5cf87b3df60efdeaf6bc7b59036
|
[
"Apache-2.0"
] | null | null | null |
utils/swift_build_support/swift_build_support/targets.py
|
adrian-prantl/swift
|
4cee546841b6f5cf87b3df60efdeaf6bc7b59036
|
[
"Apache-2.0"
] | null | null | null |
# swift_build_support/targets.py - Build target helpers -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
import os
import platform
class StdlibDeploymentTarget(object):
class OSX(object):
x86_64 = 'macosx-x86_64'
allArchs = [x86_64]
class iOS(object): # noqa
armv7 = 'iphoneos-armv7'
armv7s = 'iphoneos-armv7s'
arm64 = 'iphoneos-arm64'
allArchs = [armv7, armv7s, arm64]
class iOSSimulator(object): # noqa
i386 = 'iphonesimulator-i386'
x86_64 = 'iphonesimulator-x86_64'
allArchs = [i386, x86_64]
class AppleTV(object):
arm64 = 'appletvos-arm64'
allArchs = [arm64]
class AppleTVSimulator(object):
x86_64 = 'appletvsimulator-x86_64'
allArchs = [x86_64]
class AppleWatch(object):
armv7k = 'watchos-armv7k'
allArchs = [armv7k]
class AppleWatchSimulator(object):
i386 = 'watchsimulator-i386'
allArchs = [i386]
class Linux(object):
x86_64 = 'linux-x86_64'
armv6 = 'linux-armv6'
armv7 = 'linux-armv7'
aarch64 = 'linux-aarch64'
ppc64 = 'linux-ppc64'
ppc64le = 'linux-ppc64le'
s390x = 'linux-s390x'
allArchs = [x86_64, armv6, armv7, aarch64, ppc64, ppc64le, s390x]
class FreeBSD(object):
amd64 = 'freebsd-x86_64'
allArchs = [amd64]
class Cygwin(object):
x86_64 = 'cygwin-x86_64'
allArchs = [x86_64]
class Android(object):
armv7 = 'android-armv7'
allArchs = [armv7]
@staticmethod
def host_target():
"""
Return the host target for the build machine, if it is one of
the recognized targets. Otherwise, return None.
"""
system = platform.system()
machine = platform.machine()
if system == 'Linux':
if machine == 'x86_64':
return StdlibDeploymentTarget.Linux.x86_64
elif machine.startswith('armv7'):
# linux-armv7* is canonicalized to 'linux-armv7'
return StdlibDeploymentTarget.Linux.armv7
elif machine.startswith('armv6'):
# linux-armv6* is canonicalized to 'linux-armv6'
return StdlibDeploymentTarget.Linux.armv6
elif machine == 'aarch64':
return StdlibDeploymentTarget.Linux.aarch64
elif machine == 'ppc64':
return StdlibDeploymentTarget.Linux.ppc64
elif machine == 'ppc64le':
return StdlibDeploymentTarget.Linux.ppc64le
elif machine == 's390x':
return StdlibDeploymentTarget.Linux.s390x
elif system == 'Darwin':
if machine == 'x86_64':
return StdlibDeploymentTarget.OSX.x86_64
elif system == 'FreeBSD':
if machine == 'amd64':
return StdlibDeploymentTarget.FreeBSD.amd64
elif system == 'CYGWIN_NT-10.0':
if machine == 'x86_64':
return StdlibDeploymentTarget.Cygwin.x86_64
return None
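    # For example, an x86_64 Linux build machine maps to
    # StdlibDeploymentTarget.Linux.x86_64 ('linux-x86_64'), and an x86_64
    # macOS machine to StdlibDeploymentTarget.OSX.x86_64 ('macosx-x86_64').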
@staticmethod
def default_stdlib_deployment_targets():
"""
Return targets for the Swift stdlib, based on the build machine.
If the build machine is not one of the recognized ones, return None.
"""
host_target = StdlibDeploymentTarget.host_target()
if host_target is None:
return None
# OS X build machines configure all Darwin platforms by default.
# Put iOS native targets last so that we test them last
# (it takes a long time).
if host_target == StdlibDeploymentTarget.OSX.x86_64:
return [host_target] + \
StdlibDeploymentTarget.iOSSimulator.allArchs + \
StdlibDeploymentTarget.AppleTVSimulator.allArchs + \
StdlibDeploymentTarget.AppleWatchSimulator.allArchs + \
StdlibDeploymentTarget.iOS.allArchs + \
StdlibDeploymentTarget.AppleTV.allArchs + \
StdlibDeploymentTarget.AppleWatch.allArchs
else:
# All other machines only configure their host stdlib by default.
return [host_target]
def install_prefix():
"""
Returns the default path at which built Swift products (like bin, lib,
and include) will be installed, based on the host machine's operating
system.
"""
if platform.system() == 'Darwin':
return '/Applications/Xcode.app/Contents/Developer/Toolchains/' + \
'XcodeDefault.xctoolchain/usr'
else:
return '/usr'
def darwin_toolchain_prefix(darwin_install_prefix):
"""
    Given the install prefix for a Darwin system, and assuming that path
    points inside a .xctoolchain directory (ending in 'usr'), return the
    path to the .xctoolchain directory itself.
"""
return os.path.split(darwin_install_prefix)[0]
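# e.g. given the Darwin install_prefix() above, this returns
# '/Applications/Xcode.app/Contents/Developer/Toolchains/XcodeDefault.xctoolchain'.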
| 32.981013 | 77 | 0.618691 |
49c69baaed5416cdfed4771994d0617e7b9de16c | 41,856 | py | Python | cinder/tests/unit/image/test_glance.py | alexisries/openstack-cinder | 7cc6e45c5ddb8bf771bdb01b867628e41761ae11 | ["Apache-2.0"] | 2 | 2019-05-24T14:13:50.000Z | 2019-05-24T14:21:13.000Z | cinder/tests/unit/image/test_glance.py | vexata/cinder | 7b84c0842b685de7ee012acec40fb4064edde5e9 | ["Apache-2.0"] | null | null | null | cinder/tests/unit/image/test_glance.py | vexata/cinder | 7b84c0842b685de7ee012acec40fb4064edde5e9 | ["Apache-2.0"] | null | null | null |
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import itertools
import ddt
import glanceclient.exc
from keystoneauth1.loading import session as ks_session
from keystoneauth1 import session
import mock
from oslo_config import cfg
from cinder import context
from cinder import exception
from cinder.image import glance
from cinder import service_auth
from cinder import test
from cinder.tests.unit.glance import stubs as glance_stubs
CONF = cfg.CONF
class NullWriter(object):
"""Used to test ImageService.get which takes a writer object."""
    def write(self, *args, **kwargs):
        pass
class TestGlanceSerializer(test.TestCase):
def test_serialize(self):
metadata = {'name': 'image1',
'visibility': 'public',
'protected': True,
'foo': 'bar',
'properties': {
'prop1': 'propvalue1',
'mappings': [
{'device': 'bbb'},
{'device': 'yyy'}],
'block_device_mapping': [
{'device_name': '/dev/fake'},
{'device_name': '/dev/fake0'}]}}
converted_expected = {
'name': 'image1',
'visibility': 'public',
'protected': True,
'foo': 'bar',
'properties': {
'prop1': 'propvalue1',
'mappings':
'[{"device": "bbb"}, '
'{"device": "yyy"}]',
'block_device_mapping':
'[{"device_name": "/dev/fake"}, '
'{"device_name": "/dev/fake0"}]'}}
converted = glance._convert_to_string(metadata)
self.assertEqual(converted_expected, converted)
self.assertEqual(metadata, glance._convert_from_string(converted))
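        # The round trip works because _convert_to_string JSON-encodes
        # list/dict property values (Glance stores image properties as
        # strings) and _convert_from_string decodes them back.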
@ddt.ddt
class TestGlanceImageService(test.TestCase):
"""Tests the Glance image service.
At a high level, the translations involved are:
1. Glance -> ImageService - This is needed so we can support
multiple ImageServices (Glance, Local, etc)
2. ImageService -> API - This is needed so we can support multiple
APIs (OpenStack, EC2)
"""
NOW_GLANCE_OLD_FORMAT = "2010-10-11T10:30:22"
NOW_GLANCE_FORMAT = "2010-10-11T10:30:22.000000"
class tzinfo(datetime.tzinfo):
@staticmethod
def utcoffset(*args, **kwargs):
return datetime.timedelta()
NOW_DATETIME = datetime.datetime(2010, 10, 11, 10, 30, 22, tzinfo=tzinfo())
def setUp(self):
super(TestGlanceImageService, self).setUp()
client = glance_stubs.StubGlanceClient()
service_catalog = [{u'type': u'image', u'name': u'glance',
u'endpoints': [{
u'publicURL': u'http://example.com:9292'}]}]
self.service = self._create_image_service(client)
self.context = context.RequestContext('fake', 'fake', auth_token=True)
self.context.service_catalog = service_catalog
self.mock_object(glance.time, 'sleep', return_value=None)
def _create_image_service(self, client):
def _fake_create_glance_client(context, netloc, use_ssl):
return client
self.mock_object(glance, '_create_glance_client',
_fake_create_glance_client)
client_wrapper = glance.GlanceClientWrapper('fake', 'fake_host', 9292)
return glance.GlanceImageService(client=client_wrapper)
@staticmethod
def _make_fixture(**kwargs):
fixture = {'name': None,
'properties': {},
'status': None,
'visibility': None,
'protected': None}
fixture.update(kwargs)
return fixture
@staticmethod
def _make_image_member_fixtures(**kwargs):
fixtures = []
fixture = {'status': None,
'image_id': None,
'member_id': None,
'created_at': '2018-03-14T21:48:13Z',
'updated_at': '2018-03-14T21:50:51Z',
'schema': '/v2/schemas/member'}
fixture.update(kwargs)
fixtures.append(fixture)
return fixtures
def _make_datetime_fixture(self):
return self._make_fixture(created_at=self.NOW_GLANCE_FORMAT,
updated_at=self.NOW_GLANCE_FORMAT,
deleted_at=self.NOW_GLANCE_FORMAT)
def test_list_members(self):
fixture = {'status': None,
'image_id': None,
'member_id': None,
'created_at': '2018-03-14T21:48:13Z',
'updated_at': '2018-03-14T21:50:51Z',
'schema': '/v2/schemas/member'}
image_id = '97c1ef11-3a64-4756-9f8c-7f9fb5abe09f'
member_id = '50fcc79f25524744a2c34682a1a74914'
fixture['status'] = 'accepted'
fixture['image_id'] = image_id
fixture['member_id'] = member_id
with mock.patch.object(self.service, '_client') as client_mock:
client_mock.call.return_value = self._make_image_member_fixtures(
image_id=image_id, member_id=member_id, status='accepted')
result = self.service.list_members(self.context, image_id)
self.assertEqual([fixture], result)
client_mock.call.assert_called_once_with(self.context,
'list',
controller='image_members',
image_id=image_id)
def test_get_api_servers(self):
result = glance.get_api_servers(self.context)
expected = (u'example.com:9292', False)
self.assertEqual(expected, next(result))
def test_get_api_servers_not_mounted_at_root_and_ssl(self):
service_catalog = [{u'type': u'image', u'name': u'glance',
u'endpoints': [{
u'publicURL': u'https://example.com/image'}]}]
self.context = context.RequestContext('fake', 'fake', auth_token=True)
self.context.service_catalog = service_catalog
result = glance.get_api_servers(self.context)
expected = (u'example.com/image', True)
self.assertEqual(expected, next(result))
def test_create_with_instance_id(self):
"""Ensure instance_id is persisted as an image-property."""
fixture = {'name': 'test image',
'is_public': False,
'protected': False,
'properties': {'instance_id': '42', 'user_id': 'fake'}}
image_id = self.service.create(self.context, fixture)['id']
image_meta = self.service.show(self.context, image_id)
expected = {
'id': image_id,
'name': 'test image',
'protected': False,
'size': None,
'min_disk': None,
'min_ram': None,
'disk_format': None,
'container_format': None,
'checksum': None,
'created_at': self.NOW_DATETIME,
'updated_at': self.NOW_DATETIME,
'deleted': None,
'status': None,
'properties': {'instance_id': '42', 'is_public': False,
'user_id': 'fake'},
'owner': None,
'visibility': None,
}
self.assertDictEqual(expected, image_meta)
image_metas = self.service.detail(self.context)
self.assertDictEqual(expected, image_metas[0])
def test_create_without_instance_id(self):
"""Test Creating images without instance_id.
Ensure we can create an image without having to specify an
instance_id. Public images are an example of an image not tied to an
instance.
"""
fixture = {'name': 'test image', 'is_public': False,
'protected': False}
image_id = self.service.create(self.context, fixture)['id']
expected = {
'id': image_id,
'name': 'test image',
'protected': False,
'size': None,
'min_disk': None,
'min_ram': None,
'disk_format': None,
'container_format': None,
'checksum': None,
'created_at': self.NOW_DATETIME,
'updated_at': self.NOW_DATETIME,
'deleted': None,
'status': None,
'properties': {'is_public': False},
'owner': None,
'visibility': None,
}
actual = self.service.show(self.context, image_id)
self.assertDictEqual(expected, actual)
def test_show_shared_image_membership_success(self):
"""Test Create Shared Image Membership Success
Ensure we can get access to a shared image
"""
fixture = {'name': 'test image', 'is_public': False,
'protected': False, 'visibility': 'shared'}
# pid = self.context.project_id
image_id = self.service.create(self.context, fixture)['id']
image = {
'id': image_id,
'name': 'test image',
'protected': False,
'size': None,
'min_disk': None,
'min_ram': None,
'disk_format': None,
'container_format': None,
'checksum': None,
'created_at': self.NOW_DATETIME,
'updated_at': self.NOW_DATETIME,
'deleted': None,
'status': None,
'properties': {'is_public': False},
'owner': None,
'visibility': None,
}
member_id = '50fcc79f25524744a2c34682a1a74914'
with mock.patch.object(self.service, '_client') as client_mock:
with mock.patch.object(
self.service, '_translate_from_glance') as tg_mock:
tg_mock.return_value = {}
mock_image = mock.Mock()
mock_image.is_public = False
mock_image.properties = {'is_public': False}
mock_image.visibility = 'shared'
mock_image.keys.return_value = image.keys()
client_mock.call.side_effect = [
mock_image,
self._make_image_member_fixtures(image_id=image_id,
member_id=member_id,
status='accepted')]
self.context.project_id = member_id
self.context.is_admin = False
self.context.user_id = image_id
self.context.auth_token = False
self.service.show(self.context, image_id)
def test_show_shared_image_membership_fail_status(self):
"""Test Create Shared Image Membership Failure
Ensure we can't get access to a shared image with the wrong membership
status (in this case 'pending')
"""
fixture = {'name': 'test image', 'is_public': False,
'protected': False, 'visibility': 'shared'}
# pid = self.context.project_id
image_id = self.service.create(self.context, fixture)['id']
image = {
'id': image_id,
'name': 'test image',
'protected': False,
'size': None,
'min_disk': None,
'min_ram': None,
'disk_format': None,
'container_format': None,
'checksum': None,
'created_at': self.NOW_DATETIME,
'updated_at': self.NOW_DATETIME,
'deleted': None,
'status': None,
'properties': {'is_public': False},
'owner': None,
'visibility': None,
}
member_id = '50fcc79f25524744a2c34682a1a74914'
with mock.patch.object(self.service, '_client') as client_mock:
with mock.patch.object(
self.service, '_translate_from_glance') as tg_mock:
tg_mock.return_value = {}
mock_image = mock.Mock()
mock_image.is_public = False
mock_image.properties = {'is_public': False}
mock_image.visibility = 'shared'
mock_image.keys.return_value = image.keys()
client_mock.call.side_effect = [
mock_image,
self._make_image_member_fixtures(image_id=image_id,
member_id=member_id,
status='pending')]
self.context.project_id = member_id
self.context.is_admin = False
self.context.user_id = image_id
self.context.auth_token = False
self.assertRaises(exception.ImageNotFound,
self.service.show,
self.context,
image_id)
def test_create(self):
fixture = self._make_fixture(name='test image')
num_images = len(self.service.detail(self.context))
image_id = self.service.create(self.context, fixture)['id']
self.assertIsNotNone(image_id)
self.assertEqual(num_images + 1,
len(self.service.detail(self.context)))
def test_create_and_show_non_existing_image(self):
fixture = self._make_fixture(name='test image')
image_id = self.service.create(self.context, fixture)['id']
self.assertIsNotNone(image_id)
self.assertRaises(exception.ImageNotFound,
self.service.show,
self.context,
'bad image id')
def test_detail_private_image(self):
fixture = self._make_fixture(name='test image')
fixture['visibility'] = 'private'
fixture['protected'] = False
properties = {'owner_id': 'proj1'}
fixture['properties'] = properties
self.service.create(self.context, fixture)
proj = self.context.project_id
self.context.project_id = 'proj1'
image_metas = self.service.detail(self.context)
self.context.project_id = proj
self.assertEqual(1, len(image_metas))
self.assertEqual('test image', image_metas[0]['name'])
self.assertEqual('private', image_metas[0]['visibility'])
def test_detail_v2(self):
"""Check we don't send is_public key by default with Glance v2."""
with mock.patch.object(self.service, '_client') as client_mock:
client_mock.return_value = []
result = self.service.detail(self.context)
self.assertListEqual([], result)
client_mock.call.assert_called_once_with(self.context, 'list')
def test_detail_marker(self):
fixtures = []
ids = []
for i in range(10):
fixture = self._make_fixture(name='TestImage %d' % (i))
fixtures.append(fixture)
ids.append(self.service.create(self.context, fixture)['id'])
image_metas = self.service.detail(self.context, marker=ids[1])
self.assertEqual(8, len(image_metas))
i = 2
for meta in image_metas:
expected = {
'id': ids[i],
'status': None,
'protected': None,
'name': 'TestImage %d' % (i),
'properties': {'properties': {}},
'size': None,
'min_disk': None,
'min_ram': None,
'disk_format': None,
'container_format': None,
'checksum': None,
'created_at': self.NOW_DATETIME,
'updated_at': self.NOW_DATETIME,
'deleted': None,
'owner': None,
'visibility': None,
}
self.assertDictEqual(expected, meta)
i = i + 1
def test_detail_limit(self):
fixtures = []
ids = []
for i in range(10):
fixture = self._make_fixture(name='TestImage %d' % (i))
fixtures.append(fixture)
ids.append(self.service.create(self.context, fixture)['id'])
image_metas = self.service.detail(self.context, limit=5)
self.assertEqual(5, len(image_metas))
def test_detail_default_limit(self):
fixtures = []
ids = []
for i in range(10):
fixture = self._make_fixture(name='TestImage %d' % (i))
fixtures.append(fixture)
ids.append(self.service.create(self.context, fixture)['id'])
image_metas = self.service.detail(self.context)
for i, meta in enumerate(image_metas):
self.assertEqual(meta['name'], 'TestImage %d' % (i))
def test_detail_marker_and_limit(self):
fixtures = []
ids = []
for i in range(10):
fixture = self._make_fixture(name='TestImage %d' % (i))
fixtures.append(fixture)
ids.append(self.service.create(self.context, fixture)['id'])
image_metas = self.service.detail(self.context, marker=ids[3], limit=5)
self.assertEqual(5, len(image_metas))
i = 4
for meta in image_metas:
expected = {
'id': ids[i],
'status': None,
'protected': None,
'name': 'TestImage %d' % (i),
'properties': {'properties': {}},
'size': None,
'min_disk': None,
'min_ram': None,
'disk_format': None,
'container_format': None,
'checksum': None,
'created_at': self.NOW_DATETIME,
'updated_at': self.NOW_DATETIME,
'deleted': None,
'owner': None,
'visibility': None,
}
self.assertDictEqual(expected, meta)
i = i + 1
def test_detail_invalid_marker(self):
fixtures = []
ids = []
for i in range(10):
fixture = self._make_fixture(name='TestImage %d' % (i))
fixtures.append(fixture)
ids.append(self.service.create(self.context, fixture)['id'])
self.assertRaises(exception.Invalid, self.service.detail,
self.context, marker='invalidmarker')
def test_update(self):
fixture = self._make_fixture(name='test image')
image = self.service.create(self.context, fixture)
image_id = image['id']
fixture['name'] = 'new image name'
self.service.update(self.context, image_id, fixture)
new_image_data = self.service.show(self.context, image_id)
self.assertEqual('new image name', new_image_data['name'])
def test_update_with_data(self):
fixture = self._make_fixture(name='test image')
image = self.service.create(self.context, fixture)
image_id = image['id']
fixture['name'] = 'new image name'
data = '*' * 256
self.service.update(self.context, image_id, fixture, data=data)
new_image_data = self.service.show(self.context, image_id)
self.assertEqual(256, new_image_data['size'])
self.assertEqual('new image name', new_image_data['name'])
@mock.patch.object(glance.GlanceImageService, '_translate_from_glance')
@mock.patch.object(glance.GlanceImageService, 'show')
def test_update_purge_props(self, show, translate_from_glance):
image_id = mock.sentinel.image_id
client = mock.Mock(call=mock.Mock())
service = glance.GlanceImageService(client=client)
image_meta = {'properties': {'k1': 'v1'}}
show.return_value = {'properties': {'k2': 'v2'}}
translate_from_glance.return_value = image_meta.copy()
ret = service.update(self.context, image_id, image_meta)
self.assertDictEqual(image_meta, ret)
client.call.assert_called_once_with(
self.context, 'update', image_id, k1='v1', remove_props=['k2'])
def test_delete(self):
fixture1 = self._make_fixture(name='test image 1')
fixture2 = self._make_fixture(name='test image 2')
fixtures = [fixture1, fixture2]
num_images = len(self.service.detail(self.context))
self.assertEqual(0, num_images)
ids = []
for fixture in fixtures:
new_id = self.service.create(self.context, fixture)['id']
ids.append(new_id)
num_images = len(self.service.detail(self.context))
self.assertEqual(2, num_images)
self.service.delete(self.context, ids[0])
num_images = len(self.service.detail(self.context))
self.assertEqual(1, num_images)
def test_show_passes_through_to_client(self):
fixture = self._make_fixture(name='image1', is_public=True)
image_id = self.service.create(self.context, fixture)['id']
image_meta = self.service.show(self.context, image_id)
expected = {
'id': image_id,
'name': 'image1',
'protected': None,
'size': None,
'min_disk': None,
'min_ram': None,
'disk_format': None,
'container_format': None,
'checksum': None,
'created_at': self.NOW_DATETIME,
'updated_at': self.NOW_DATETIME,
'deleted': None,
'status': None,
'properties': {'is_public': True, 'properties': {}},
'owner': None,
'visibility': None
}
self.assertEqual(expected, image_meta)
def test_show_raises_when_no_authtoken_in_the_context(self):
fixture = self._make_fixture(name='image1',
is_public=False,
protected=False)
image_id = self.service.create(self.context, fixture)['id']
self.context.auth_token = False
self.assertRaises(exception.ImageNotFound,
self.service.show,
self.context,
image_id)
def test_detail_passes_through_to_client(self):
fixture = self._make_fixture(name='image10', is_public=True)
image_id = self.service.create(self.context, fixture)['id']
image_metas = self.service.detail(self.context)
expected = [
{
'id': image_id,
'name': 'image10',
'protected': None,
'size': None,
'min_disk': None,
'min_ram': None,
'disk_format': None,
'container_format': None,
'checksum': None,
'created_at': self.NOW_DATETIME,
'updated_at': self.NOW_DATETIME,
'deleted': None,
'status': None,
'properties': {'is_public': True, 'properties': {}},
'owner': None,
'visibility': None
},
]
self.assertEqual(expected, image_metas)
def test_show_makes_datetimes(self):
fixture = self._make_datetime_fixture()
image_id = self.service.create(self.context, fixture)['id']
image_meta = self.service.show(self.context, image_id)
self.assertEqual(self.NOW_DATETIME, image_meta['created_at'])
self.assertEqual(self.NOW_DATETIME, image_meta['updated_at'])
def test_detail_makes_datetimes(self):
fixture = self._make_datetime_fixture()
self.service.create(self.context, fixture)
image_meta = self.service.detail(self.context)[0]
self.assertEqual(self.NOW_DATETIME, image_meta['created_at'])
self.assertEqual(self.NOW_DATETIME, image_meta['updated_at'])
def test_download_with_retries(self):
tries = [0]
class MyGlanceStubClient(glance_stubs.StubGlanceClient):
"""A client that fails the first time, then succeeds."""
def get(self, image_id):
if tries[0] == 0:
tries[0] = 1
raise glanceclient.exc.ServiceUnavailable('')
else:
return {}
client = MyGlanceStubClient()
service = self._create_image_service(client)
image_id = 1 # doesn't matter
writer = NullWriter()
# When retries are disabled, we should get an exception
self.flags(glance_num_retries=0)
self.assertRaises(exception.GlanceConnectionFailed,
service.download,
self.context,
image_id,
writer)
        # Now let's enable retries. No exception should happen now.
tries = [0]
self.flags(glance_num_retries=1)
service.download(self.context, image_id, writer)
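    # The retry behaviour exercised above is driven by the glance_num_retries
    # config option; in a real deployment it would be set in cinder.conf
    # (a sketch, the value is illustrative):
    #
    #   [DEFAULT]
    #   glance_num_retries = 3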
def test_client_forbidden_converts_to_imagenotauthed(self):
class MyGlanceStubClient(glance_stubs.StubGlanceClient):
"""A client that raises a Forbidden exception."""
def get(self, image_id):
raise glanceclient.exc.Forbidden(image_id)
client = MyGlanceStubClient()
service = self._create_image_service(client)
image_id = 1 # doesn't matter
writer = NullWriter()
self.assertRaises(exception.ImageNotAuthorized, service.download,
self.context, image_id, writer)
def test_client_httpforbidden_converts_to_imagenotauthed(self):
class MyGlanceStubClient(glance_stubs.StubGlanceClient):
"""A client that raises a HTTPForbidden exception."""
def get(self, image_id):
raise glanceclient.exc.HTTPForbidden(image_id)
client = MyGlanceStubClient()
service = self._create_image_service(client)
image_id = 1 # doesn't matter
writer = NullWriter()
self.assertRaises(exception.ImageNotAuthorized, service.download,
self.context, image_id, writer)
def test_client_notfound_converts_to_imagenotfound(self):
class MyGlanceStubClient(glance_stubs.StubGlanceClient):
"""A client that raises a NotFound exception."""
def get(self, image_id):
raise glanceclient.exc.NotFound(image_id)
client = MyGlanceStubClient()
service = self._create_image_service(client)
image_id = 1 # doesn't matter
writer = NullWriter()
self.assertRaises(exception.ImageNotFound, service.download,
self.context, image_id, writer)
def test_client_httpnotfound_converts_to_imagenotfound(self):
class MyGlanceStubClient(glance_stubs.StubGlanceClient):
"""A client that raises a HTTPNotFound exception."""
def get(self, image_id):
raise glanceclient.exc.HTTPNotFound(image_id)
client = MyGlanceStubClient()
service = self._create_image_service(client)
image_id = 1 # doesn't matter
writer = NullWriter()
self.assertRaises(exception.ImageNotFound, service.download,
self.context, image_id, writer)
@mock.patch('six.moves.builtins.open')
@mock.patch('shutil.copyfileobj')
@mock.patch('cinder.image.glance.get_api_servers',
return_value=itertools.cycle([(False, 'localhost:9292')]))
def test_download_from_direct_file(self, api_servers,
mock_copyfileobj, mock_open):
fixture = self._make_fixture(name='test image',
locations=[{'url': 'file:///tmp/test'}])
image_id = self.service.create(self.context, fixture)['id']
writer = NullWriter()
self.flags(allowed_direct_url_schemes=['file'])
self.service.download(self.context, image_id, writer)
mock_copyfileobj.assert_called_once_with(mock.ANY, writer)
@mock.patch('six.moves.builtins.open')
@mock.patch('shutil.copyfileobj')
@mock.patch('cinder.image.glance.get_api_servers',
return_value=itertools.cycle([(False, 'localhost:9292')]))
def test_download_from_direct_file_non_file(self, api_servers,
mock_copyfileobj, mock_open):
fixture = self._make_fixture(name='test image',
direct_url='swift+http://test/image')
image_id = self.service.create(self.context, fixture)['id']
writer = NullWriter()
self.flags(allowed_direct_url_schemes=['file'])
self.service.download(self.context, image_id, writer)
self.assertIsNone(mock_copyfileobj.call_args)
def test_glance_client_image_id(self):
fixture = self._make_fixture(name='test image')
image_id = self.service.create(self.context, fixture)['id']
(_service, same_id) = glance.get_remote_image_service(self.context,
image_id)
self.assertEqual(same_id, image_id)
def test_glance_client_image_ref(self):
fixture = self._make_fixture(name='test image')
image_id = self.service.create(self.context, fixture)['id']
image_url = 'http://something-less-likely/%s' % image_id
(service, same_id) = glance.get_remote_image_service(self.context,
image_url)
self.assertEqual(same_id, image_id)
self.assertEqual('something-less-likely', service._client.netloc)
for ipv6_url in ('[::1]', '::1', '[::1]:444'):
image_url = 'http://%s/%s' % (ipv6_url, image_id)
(service, same_id) = glance.get_remote_image_service(self.context,
image_url)
self.assertEqual(same_id, image_id)
self.assertEqual(ipv6_url, service._client.netloc)
def test_extracting_missing_attributes(self):
"""Verify behavior from glance objects that are missing attributes
This fakes the image class and is missing the checksum and name
attribute as the client would return if they're not set in the
database. Regression test for bug #1308058.
"""
class MyFakeGlanceImage(glance_stubs.FakeImage):
def __init__(self, metadata):
IMAGE_ATTRIBUTES = ['size', 'disk_format', 'owner',
'container_format', 'id', 'created_at',
'updated_at', 'deleted', 'status',
'min_disk', 'min_ram', 'is_public',
'visibility', 'protected']
raw = dict.fromkeys(IMAGE_ATTRIBUTES)
raw.update(metadata)
self.__dict__['raw'] = raw
metadata = {
'id': 1,
'created_at': self.NOW_DATETIME,
'updated_at': self.NOW_DATETIME,
}
image = MyFakeGlanceImage(metadata)
actual = glance._extract_attributes(image)
expected = {
'id': 1,
'name': None,
'protected': None,
'size': None,
'min_disk': None,
'min_ram': None,
'disk_format': None,
'container_format': None,
'checksum': None,
'created_at': self.NOW_DATETIME,
'updated_at': self.NOW_DATETIME,
'deleted_at': None,
'deleted': None,
'status': None,
'properties': {},
'owner': None,
'visibility': None,
'cinder_encryption_key_id': None
}
self.assertEqual(expected, actual)
@mock.patch('cinder.image.glance.CONF')
def test_v2_passes_visibility_param(self, config):
config.glance_num_retries = 0
metadata = {
'id': 1,
'size': 2,
'visibility': 'public',
}
image = glance_stubs.FakeImage(metadata)
client = glance_stubs.StubGlanceClient()
service = self._create_image_service(client)
service._image_schema = glance_stubs.FakeSchema()
actual = service._translate_from_glance('fake_context', image)
expected = {
'id': 1,
'name': None,
'visibility': 'public',
'protected': None,
'size': 2,
'min_disk': None,
'min_ram': None,
'disk_format': None,
'container_format': None,
'checksum': None,
'deleted': None,
'status': None,
'properties': {},
'owner': None,
'created_at': None,
'updated_at': None
}
self.assertEqual(expected, actual)
@mock.patch('cinder.image.glance.CONF')
def test_extracting_v2_boot_properties(self, config):
config.glance_num_retries = 0
metadata = {
'id': 1,
'size': 2,
'min_disk': 2,
'min_ram': 2,
'kernel_id': 'foo',
'ramdisk_id': 'bar',
}
image = glance_stubs.FakeImage(metadata)
client = glance_stubs.StubGlanceClient()
service = self._create_image_service(client)
service._image_schema = glance_stubs.FakeSchema()
actual = service._translate_from_glance('fake_context', image)
expected = {
'id': 1,
'name': None,
'visibility': None,
'protected': None,
'size': 2,
'min_disk': 2,
'min_ram': 2,
'disk_format': None,
'container_format': None,
'checksum': None,
'deleted': None,
'status': None,
'properties': {'kernel_id': 'foo',
'ramdisk_id': 'bar'},
'owner': None,
'created_at': None,
'updated_at': None
}
self.assertEqual(expected, actual)
def test_translate_to_glance(self):
client = glance_stubs.StubGlanceClient()
service = self._create_image_service(client)
metadata = {
'id': 1,
'size': 2,
'min_disk': 2,
'min_ram': 2,
'properties': {'kernel_id': 'foo',
'ramdisk_id': 'bar',
'x_billinginfo': '123'},
}
actual = service._translate_to_glance(metadata)
expected = {
'id': 1,
'size': 2,
'min_disk': 2,
'min_ram': 2,
'kernel_id': 'foo',
'ramdisk_id': 'bar',
'x_billinginfo': '123',
}
self.assertEqual(expected, actual)
@mock.patch('cinder.image.glance.glanceclient.Client')
@mock.patch('cinder.image.glance.get_api_servers',
return_value=itertools.cycle([(False, 'localhost:9292')]))
def test_call_glance_over_quota(self, api_servers, _mockglanceclient):
"""Test glance version set by arg to GlanceClientWrapper"""
glance_wrapper = glance.GlanceClientWrapper()
fake_client = mock.Mock()
fake_client.images.method = mock.Mock(
side_effect=glanceclient.exc.HTTPOverLimit)
self.mock_object(glance_wrapper, 'client', fake_client)
self.assertRaises(exception.ImageLimitExceeded,
glance_wrapper.call, 'fake_context', 'method')
def _create_failing_glance_client(info):
class MyGlanceStubClient(glance_stubs.StubGlanceClient):
"""A client that fails the first time, then succeeds."""
def get(self, image_id):
info['num_calls'] += 1
if info['num_calls'] == 1:
raise glanceclient.exc.ServiceUnavailable('')
return {}
return MyGlanceStubClient()
class TestGlanceImageServiceClient(test.TestCase):
def setUp(self):
super(TestGlanceImageServiceClient, self).setUp()
self.context = context.RequestContext('fake', 'fake', auth_token=True)
self.mock_object(glance.time, 'sleep', return_value=None)
service_auth.reset_globals()
@mock.patch('cinder.service_auth.get_auth_plugin')
@mock.patch.object(ks_session.Session, 'load_from_options')
def test_create_glance_client_with_protocol_http(
self, mock_load, mock_get_auth_plugin):
glance._SESSION = None
self.flags(auth_strategy='keystone')
self.flags(glance_request_timeout=None)
class MyGlanceStubClient(object):
def __init__(inst, version, *args, **kwargs):
self.assertEqual('2', version)
self.assertEqual("http://fake_host:9292", args[0])
self.assertNotIn('timeout', kwargs)
self.assertIn("session", kwargs)
self.assertIn("auth", kwargs)
config_options = {'insecure': False,
'cacert': None,
'timeout': None,
'split_loggers': False}
mock_get_auth_plugin.return_value = context._ContextAuthPlugin
mock_load.return_value = session.Session
self.mock_object(glance.glanceclient, 'Client', MyGlanceStubClient)
client = glance._create_glance_client(self.context, 'fake_host:9292',
False)
self.assertIsInstance(client, MyGlanceStubClient)
mock_get_auth_plugin.assert_called_once_with(self.context)
mock_load.assert_called_once_with(**config_options)
@mock.patch('cinder.service_auth.get_auth_plugin')
@mock.patch.object(ks_session.Session, 'load_from_options')
def test_create_glance_client_with_protocol_https(
self, mock_load, mock_get_auth_plugin):
glance._SESSION = None
self.flags(auth_strategy='keystone')
self.flags(glance_request_timeout=60)
self.flags(
glance_ca_certificates_file='/opt/stack/data/ca-bundle.pem')
class MyGlanceStubClient(object):
def __init__(inst, version, *args, **kwargs):
self.assertEqual('2', version)
self.assertEqual("https://fake_host:9292", args[0])
self.assertNotIn('timeout', kwargs)
self.assertIn("session", kwargs)
self.assertIn("auth", kwargs)
config_options = {'insecure': False,
'cacert': '/opt/stack/data/ca-bundle.pem',
'timeout': 60,
'split_loggers': False}
mock_get_auth_plugin.return_value = context._ContextAuthPlugin
mock_load.return_value = session.Session
self.mock_object(glance.glanceclient, 'Client', MyGlanceStubClient)
client = glance._create_glance_client(self.context, 'fake_host:9292',
True)
self.assertIsInstance(client, MyGlanceStubClient)
mock_get_auth_plugin.assert_called_once_with(self.context)
mock_load.assert_called_once_with(**config_options)
def test_create_glance_client_auth_strategy_noauth_with_protocol_https(
self):
self.flags(auth_strategy='noauth')
self.flags(glance_request_timeout=60)
self.flags(glance_api_insecure=False)
self.flags(
glance_ca_certificates_file='/opt/stack/data/ca-bundle.pem')
class MyGlanceStubClient(object):
def __init__(inst, version, *args, **kwargs):
self.assertEqual('2', version)
self.assertEqual('https://fake_host:9292', args[0])
self.assertEqual(60, kwargs['timeout'])
self.assertNotIn("session", kwargs)
self.assertNotIn("auth", kwargs)
self.assertEqual(
'/opt/stack/data/ca-bundle.pem', kwargs['cacert'])
self.assertEqual(False, kwargs['insecure'])
self.mock_object(glance.glanceclient, 'Client', MyGlanceStubClient)
client = glance._create_glance_client(self.context, 'fake_host:9292',
True)
self.assertIsInstance(client, MyGlanceStubClient)
def test_create_glance_client_auth_strategy_noauth_with_protocol_http(
self):
self.flags(auth_strategy='noauth')
self.flags(glance_request_timeout=None)
class MyGlanceStubClient(object):
def __init__(inst, version, *args, **kwargs):
self.assertEqual('2', version)
self.assertEqual("http://fake_host:9292", args[0])
self.assertNotIn('timeout', kwargs)
self.assertNotIn("session", kwargs)
self.assertNotIn("auth", kwargs)
self.mock_object(glance.glanceclient, 'Client', MyGlanceStubClient)
client = glance._create_glance_client(self.context, 'fake_host:9292',
False)
self.assertIsInstance(client, MyGlanceStubClient)
| 39.486792
| 79
| 0.567876
|
2263e16072e8d5d4ffc97cfeaffbc88e0583039d
| 2,128
|
py
|
Python
|
pypd/models/service.py
|
pmgrafe/pagerduty-api-python-client
|
f420b34ca9b29689cc2ecc9adca6dc5d56ae7161
|
[
"MIT"
] | 88
|
2016-07-26T04:51:18.000Z
|
2021-09-12T04:50:18.000Z
|
pypd/models/service.py
|
pmgrafe/pagerduty-api-python-client
|
f420b34ca9b29689cc2ecc9adca6dc5d56ae7161
|
[
"MIT"
] | 57
|
2016-07-26T13:21:23.000Z
|
2020-06-23T18:22:24.000Z
|
pypd/models/service.py
|
pmgrafe/pagerduty-api-python-client
|
f420b34ca9b29689cc2ecc9adca6dc5d56ae7161
|
[
"MIT"
] | 80
|
2016-06-02T18:29:12.000Z
|
2020-06-21T08:35:28.000Z
|
# Copyright (c) PagerDuty.
# See LICENSE for details.
from .entity import Entity
from .integration import Integration
from .vendor import Vendor
class Service(Entity):
"""PagerDuty service entity."""
STR_OUTPUT_FIELDS = ('id', 'name',)
integrationFactory = Integration
vendorFactory = Vendor
ALLOWED_SERVICE_TYPES = [
# 'service',
'service_reference',
]
@classmethod
def validate(cls, service_info):
"""Validate `service_info` to be acceptable service data."""
assert isinstance(service_info, dict)
assert (service_info['type'] in cls.ALLOWED_SERVICE_TYPES)
def create_integration(self, integration_info, **kwargs):
"""
Create an integration for this service.
See: https://v2.developer.pagerduty.com/v2/page/api-reference#!/
Services/post_services_id_integrations
"""
service_info = integration_info.get('service')
vendor_info = integration_info.get('vendor')
if service_info is not None:
self.__class__.validate(service_info)
if vendor_info is not None:
self.vendorFactory.validate(vendor_info)
endpoint = '{0}/{1}/integrations'.format(
self.endpoint,
self['id'],
)
return self.integrationFactory.create(
endpoint=endpoint,
api_key=self.api_key,
data=integration_info,
query_params=kwargs
)
def integrations(self, **kwargs):
"""Retrieve all this services integrations."""
ids = [ref['id'] for ref in self['integrations']]
return [Integration.fetch(id, service=self, query_params=kwargs) for id in ids]
def get_integration(self, id, **kwargs):
"""Retrieve a single integration by id."""
return Integration.fetch(id, service=self, query_params=kwargs)
def update_integration(self, *args, **kwargs):
"""Update this integration on this service."""
        raise NotImplementedError
# sugar-pills
view_integration = get_integration
add_integration = create_integration
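# A minimal usage sketch (assumptions: pypd is configured with an API key and
# 'PXXXXXX' is a hypothetical service id; the integration payload is
# illustrative, not the full schema):
#
#   svc = Service.fetch('PXXXXXX', api_key='API_KEY')
#   integration = svc.create_integration({'name': 'my-integration'})
#   integrations = svc.integrations()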
| 30.84058
| 87
| 0.643327
|
98c5f5782a090e3f42f1e15515953e26a4cd7269
| 17,669
|
py
|
Python
|
src/ros_robodk_post_processors/robodk_post_processors/HIWIN_HRSS.py
|
jeritgeorge/ros_robodk_post_processors
|
dd6fda231c5bcf964bf177b1737dc4b06c27cd33
|
[
"BSD-3-Clause"
] | null | null | null |
src/ros_robodk_post_processors/robodk_post_processors/HIWIN_HRSS.py
|
jeritgeorge/ros_robodk_post_processors
|
dd6fda231c5bcf964bf177b1737dc4b06c27cd33
|
[
"BSD-3-Clause"
] | null | null | null |
src/ros_robodk_post_processors/robodk_post_processors/HIWIN_HRSS.py
|
jeritgeorge/ros_robodk_post_processors
|
dd6fda231c5bcf964bf177b1737dc4b06c27cd33
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2017 - RoboDK Software S.L. - http://www.robodk.com/
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------
# This file is a POST PROCESSOR for Robot Offline Programming to generate programs
# for a HIWIN robot (HRSS: HIWIN Robot System Software)
#
# To edit/test this POST PROCESSOR script file:
# Select "Program"->"Add/Edit Post Processor", then select your post or create a new one.
#   You can edit this file using any text editor or Python editor. Using a Python editor allows you to quickly evaluate a sample program at the end of this file.
# Python should be automatically installed with RoboDK
#
# You can also edit the POST PROCESSOR manually:
# 1- Open the *.py file with Python IDLE (right click -> Edit with IDLE)
# 2- Make the necessary changes
# 3- Run the file to open Python Shell: Run -> Run module (F5 by default)
# 4- The "test_post()" function is called automatically
# Alternatively, you can edit this file using a text editor and run it with Python
#
# To use a POST PROCESSOR file you must place the *.py file in "C:/RoboDK/Posts/"
# To select one POST PROCESSOR for your robot in RoboDK you must follow these steps:
# 1- Open the robot panel (double click a robot)
# 2- Select "Parameters"
# 3- Select "Unlock advanced options"
# 4- Select your post as the file name in the "Robot brand" box
#
# To delete an existing POST PROCESSOR script, simply delete this file (.py file)
#
# ----------------------------------------------------
# More information about RoboDK Post Processors and Offline Programming here:
# http://www.robodk.com/help#PostProcessor
# http://www.robodk.com/doc/en/PythonAPI/postprocessor.html
# ----------------------------------------------------
# ----------------------------------------------------
# Import RoboDK tools
from robodk import *
HEADER = ''';[Point&S]
DECL E6POINT P0={A1 0.0, A2 0.0, A3 0.0, A4 0.0, A5 -90.0, A6 0.0}
;NULL
;[Point&E]
;[Program&SV2]
;PTP P0 CONT Vel=100% Acc=50% TOOL[0] BASE[0]
'''
# ----------------------------------------------------
def pose_2_str(pose):
"""Converts a pose target to a string"""
#[x,y,z,w,p,r] = Pose_2_KUKA(pose)
#return ('X %.3f, Y %.3f, Z %.3f, A %.3f, B %.3f, C %.3f' % (x,y,z,r,p,w)) # old version had to be switched
[x,y,z,r,p,w] = pose_2_xyzrpw(pose)
return ('X %.3f,Y %.3f,Z %.3f,A %.3f,B %.3f,C %.3f' % (x,y,z,w,p,r))
def pose_2_str_ext(pose,joints):
njoints = len(joints)
if njoints <= 6:
return pose_2_str(pose)
else:
extaxes = ''
for i in range(njoints-6):
extaxes = extaxes + (',E%i %.5f' % (i+1, joints[i+6]))
return pose_2_str(pose) + extaxes
def angles_2_str(angles):
    """Converts a joint target to a string"""
    data = ['A1','A2','A3','A4','A5','A6','E1','E2','E3','E4','E5','E6']
    # Pair each axis label with its angle value and join with commas
    return ','.join('%s %.5f' % (data[i], angles[i]) for i in range(len(angles)))
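# A worked example of the joint-target formatting above:
#   angles_2_str([10, 20, 30, 0, -90, 0])
#   # -> 'A1 10.00000,A2 20.00000,A3 30.00000,A4 0.00000,A5 -90.00000,A6 0.00000'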
def get_safe_name(varname):
"""Get a safe program name"""
for c in r' -[]/\;,><&*:%=+@!#^|?^':
varname = varname.replace(c,'_')
if len(varname) <= 0:
varname = 'Program'
if varname[0].isdigit():
varname = 'P' + varname
return varname
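# Example of the sanitization above: spaces and special characters become
# underscores, and a leading digit gets a 'P' prefix:
#   get_safe_name('2 spindle on/off')  ->  'P2_spindle_on_off'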
# ----------------------------------------------------
# Object class that handles the robot instructions/syntax
class RobotPost(object):
"""Robot post object"""
PROG_EXT = 'hrb' # set the program extension
MAX_LINES_X_PROG = 5000 # maximum number of lines per program. It will then generate multiple "pages (files)"
INCLUDE_SUB_PROGRAMS = False
# other variables
ROBOT_POST = ''
ROBOT_NAME = ''
# Multiple program files variables
PROG_NAME = 'unknown' # single program name
PROG_NAMES = []
PROG_FILES = []
PROG_LIST = []
nLines = 0
nProgs = 0
PROG = ''
LOG = ''
nAxes = 6
APO_VALUE = 1
C_DIS = ' CONT'#' C_DIS'
C_PTP = ' CONT'#' C_PTP'
SPEED_MMS = 1000
    SPEED_PERCENT = 50
ID_TOOL = 0
ID_BASE = 0
def __init__(self, robotpost=None, robotname=None, robot_axes = 6, **kwargs):
self.ROBOT_POST = robotpost
self.ROBOT_NAME = robotname
self.PROG = ''
self.LOG = ''
self.nAxes = robot_axes
for k,v in kwargs.items():
if k == 'lines_x_prog':
self.MAX_LINES_X_PROG = v
def ProgStart(self, progname, new_page = False):
progname_i = progname
if new_page:
nPages = len(self.PROG_LIST)
if nPages == 0:
progname_i = progname
else:
progname_i = "%s%i" % (self.PROG_NAME, nPages)
else:
self.PROG_NAME = progname
self.nProgs = self.nProgs + 1
self.PROG_NAMES = []
if self.nProgs > 1 and not self.INCLUDE_SUB_PROGRAMS:
return
self.PROG_NAMES.append(progname_i)
self.addline('; Program: %s' % progname_i)
if not new_page:
self.PROG = self.PROG + HEADER
#if self.nAxes > 6:
# self.addline('$ACT_EX_AX = %i' % (self.nAxes-6))
def ProgFinish(self, progname, new_page = False):
if new_page:
self.PROG = self.PROG + "\n;[Program&E]\n"
self.PROG_LIST.append(self.PROG)
self.PROG = ''
self.nLines = 0
elif self.nProgs <= 1 or self.INCLUDE_SUB_PROGRAMS:
self.PROG = self.PROG + "\n;[Program&E]\n"
def progsave(self, folder, progname, ask_user = False, show_result = False):
progname = progname + '.' + self.PROG_EXT
if ask_user or not DirExists(folder):
filesave = getSaveFile(folder, progname, 'Save program as...')
if filesave is not None:
filesave = filesave.name
else:
return
else:
filesave = folder + '/' + progname
fid = open(filesave, "w")
fid.write(self.PROG)
fid.close()
print('SAVED: %s\n' % filesave) # tell RoboDK the path of the saved file
self.PROG_FILES.append(filesave)
# open file with default application
if show_result:
if type(show_result) is str:
# Open file with provided application
import subprocess
p = subprocess.Popen([show_result, filesave])
elif type(show_result) is list:
import subprocess
p = subprocess.Popen(show_result + [filesave])
else:
# open file with default application
import os
os.startfile(filesave)
if len(self.LOG) > 0:
mbox('Program generation LOG:\n\n' + self.LOG)
def ProgSave(self, folder, progname, ask_user = False, show_result = False):
if len(self.PROG_LIST) >= 1:
if self.nLines > 0:
self.PROG_LIST.append(self.PROG)
self.PROG = ''
self.nLines = 0
npages = len(self.PROG_LIST)
progname_main = progname + "Main"
mainprog = "Program: %s\n" % progname_main
#mainprog += "EXT BAS (BAS_COMMAND :IN,REAL :IN )"
#for i in range(npages):
# self.PROG = self.PROG_LIST[i]
# mainprog += "EXT %s()\n" % self.PROG_NAMES[i]
for i in range(npages):
self.PROG = self.PROG_LIST[i]
mainprog += "%s\n" % self.PROG_NAMES[i]
self.PROG = mainprog
self.progsave(folder, progname_main, ask_user, show_result)
self.LOG = ''
if len(self.PROG_FILES) == 0:
# cancelled by user
return
first_file = self.PROG_FILES[0]
folder_user = getFileDir(first_file)
# progname_user = getFileName(self.FILE_SAVED)
for i in range(npages):
self.PROG = self.PROG_LIST[i]
self.progsave(folder_user, self.PROG_NAMES[i], False, show_result)
else:
self.progsave(folder, progname, ask_user, show_result)
def ProgSendRobot(self, robot_ip, remote_path, ftp_user, ftp_pass):
"""Send a program to the robot using the provided parameters. This method is executed right after ProgSave if we selected the option "Send Program to Robot".
The connection parameters must be provided in the robot connection menu of RoboDK"""
UploadFTP(self.PROG_FILES, robot_ip, remote_path, ftp_user, ftp_pass)
def MoveJ(self, pose, joints, conf_RLF=None):
"""Add a joint movement"""
self.addline('PTP {' + angles_2_str(joints) + '}' + self.C_PTP + ' Vel=%i%% TOOL[%i] BASE[%i]' % (self.SPEED_PERCENT,self.ID_TOOL,self.ID_BASE))
#PTP P1 CONT Vel=100% Acc=50% TOOL[0] BASE[0]
def MoveL(self, pose, joints, conf_RLF=None):
"""Add a linear movement"""
self.addline('LIN {' + pose_2_str_ext(pose,joints) + '}' + self.C_DIS + ' Vel=%.1fmm/s TOOL[%i] BASE[%i]' % (self.SPEED_MMS,self.ID_TOOL,self.ID_BASE))
def MoveC(self, pose1, joints1, pose2, joints2, conf_RLF_1=None, conf_RLF_2=None):
"""Add a circular movement"""
self.addline('CIRC {' + pose_2_str_ext(pose1,joints1) + '}{' + pose_2_str_ext(pose2,joints2) + '}' + self.C_DIS + ' Vel=%.1fmm/s TOOL[%i] BASE[%i]' % (self.SPEED_MMS,self.ID_TOOL,self.ID_BASE))
def setFrame(self, pose, frame_id=None, frame_name=None):
"""Change the robot reference frame"""
if frame_id is None or frame_id < 1:
frame_id = 1
frame_name = get_safe_name(frame_name)
self.ID_BASE = frame_id
self.addline('FRAME ' + frame_name + '={' + pose_2_str(pose) + '}')
self.addline('SET_BASE ' + str(frame_id))
self.addline('SET_BASE ' + frame_name)
def setTool(self, pose, tool_id=None, tool_name=None):
"""Change the robot TCP"""
if tool_id is None or tool_id < 1:
tool_id = 0
tool_name = get_safe_name(tool_name)
self.ID_TOOL = tool_id
self.addline('FRAME ' + tool_name + '={' + pose_2_str(pose) + '}')
self.addline('SET_TOOL ' + str(tool_id))
self.addline('SET_TOOL ' + tool_name)
def Pause(self, time_ms):
"""Pause the robot program"""
if time_ms <= 0:
self.addline('HALT')
else:
self.addline('WAIT SEC %.3f' % (time_ms*0.001))
def setSpeed(self, speed_mms):
"""Changes the robot speed (in mm/s)"""
self.SPEED_MMS = speed_mms
self.addline('; SPEED = %.5f mm/s' % (speed_mms))
def setAcceleration(self, accel_mmss):
"""Changes the current robot acceleration"""
self.addline('; ACCELERATION = %.5f' % (accel_mmss/1000.0))
def setSpeedJoints(self, speed_degs):
"""Changes the robot joint speed (in deg/s)"""
self.addline('; JOINT SPEED = %.5f deg/s' % speed_degs)
self.SPEED_PERCENT = max(0,min(100,100*speed_degs/300.0))
def setAccelerationJoints(self, accel_degss):
"""Changes the robot joint acceleration (in deg/s2)"""
self.addline('; JOINT ACCELERATION = %.5f deg/s2' % accel_degss)
def setZoneData(self, zone_mm):
"""Changes the zone data approach (makes the movement more smooth)"""
self.APO_VALUE = zone_mm
if zone_mm >= 0:
self.addline('CPTP = %.3f' % zone_mm)
self.addline('CLIN = %.3f' % zone_mm)
self.C_DIS = ' C_LIN'
self.C_PTP = ' C_PTP'
else:
self.C_DIS = ' CONT'
self.C_PTP = ' CONT'
def setDO(self, io_var, io_value):
"""Sets a variable (output) to a given value"""
if type(io_var) != str: # set default variable name if io_var is a number
io_var = '$DO[%s]' % str(io_var)
if type(io_value) != str: # set default variable value if io_value is a number
if io_value > 0:
io_value = 'TRUE'
else:
io_value = 'FALSE'
# at this point, io_var and io_value must be string values
self.addline('%s=%s' % (io_var, io_value))
def waitDI(self, io_var, io_value, timeout_ms=-1):
"""Waits for an input io_var to attain a given value io_value. Optionally, a timeout can be provided."""
if type(io_var) != str: # set default variable name if io_var is a number
io_var = '$DI[%s]' % str(io_var)
if type(io_value) != str: # set default variable value if io_value is a number
if io_value > 0:
io_value = 'TRUE'
else:
io_value = 'FALSE'
# at this point, io_var and io_value must be string values
        # NOTE: timeout_ms is currently not implemented; the generated code
        # polls the input indefinitely
        self.addline('WHILE %s!=%s' % (io_var, io_value))
        self.addline('    WAIT SEC 0.1')
        self.addline('ENDWHILE')
def RunCode(self, code, is_function_call = False):
"""Adds code or a function call"""
if is_function_call:
            code = code.replace(' ', '_')  # function call names can not contain spaces
if code.endswith('()'):
code = code[:-2]
self.addline(code)
else:
self.addline(code)
def RunMessage(self, message, iscomment = False):
"""Add a joint movement"""
if iscomment:
self.addline('; ' + message)
else:
self.addline('; Display message: ' + message)
# ------------------ private ----------------------
def addline(self, newline):
"""Add a program line"""
if self.nProgs > 1 and not self.INCLUDE_SUB_PROGRAMS:
return
if self.nLines > self.MAX_LINES_X_PROG:
self.nLines = 0
self.ProgFinish(self.PROG_NAME, True)
self.ProgStart(self.PROG_NAME, True)
self.PROG = self.PROG + newline + '\n'
self.nLines = self.nLines + 1
def addlog(self, newline):
"""Add a log message"""
if self.nProgs > 1 and not self.INCLUDE_SUB_PROGRAMS:
return
self.LOG = self.LOG + newline + '\n'
# -------------------------------------------------
# ------------ For testing purposes ---------------
def Pose(xyzrpw):
[x,y,z,r,p,w] = xyzrpw
a = r*math.pi/180
b = p*math.pi/180
c = w*math.pi/180
ca = math.cos(a)
sa = math.sin(a)
cb = math.cos(b)
sb = math.sin(b)
cc = math.cos(c)
sc = math.sin(c)
return Mat([[cb*ca, ca*sc*sb - cc*sa, sc*sa + cc*ca*sb, x],[cb*sa, cc*ca + sc*sb*sa, cc*sb*sa - ca*sc, y],[-sb, cb*sc, cc*cb, z],[0,0,0,1]])
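# Sanity check for the helper above: with all angles zero the rotation block
# reduces to the identity, so the result is a pure translation:
#   Pose([10, 20, 30, 0, 0, 0]) == Mat([[1, 0, 0, 10],
#                                       [0, 1, 0, 20],
#                                       [0, 0, 1, 30],
#                                       [0, 0, 0,  1]])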
def test_post():
"""Test the post with a basic program"""
robot = RobotPost('Kuka_custom', 'Generic Kuka')
robot.ProgStart("Program")
robot.RunMessage("Program generated by RoboDK", True)
robot.setFrame(Pose([807.766544, -963.699898, 41.478944, 0, 0, 0]))
robot.setTool(Pose([62.5, -108.253175, 100, -60, 90, 0]))
robot.MoveJ(Pose([200, 200, 500, 180, 0, 180]), [-46.18419, -6.77518, -20.54925, 71.38674, 49.58727, -302.54752] )
robot.MoveL(Pose([200, 250, 348.734575, 180, 0, -150]), [-41.62707, -8.89064, -30.01809, 60.62329, 49.66749, -258.98418] )
robot.MoveL(Pose([200, 200, 262.132034, 180, 0, -150]), [-43.73892, -3.91728, -35.77935, 58.57566, 54.11615, -253.81122] )
robot.RunMessage("Setting air valve 1 on")
robot.RunCode("TCP_On", True)
robot.Pause(1000)
robot.MoveL(Pose([200, 250, 348.734575, 180, 0, -150]), [-41.62707, -8.89064, -30.01809, 60.62329, 49.66749, -258.98418] )
robot.MoveL(Pose([250, 300, 278.023897, 180, 0, -150]), [-37.52588, -6.32628, -34.59693, 53.52525, 49.24426, -251.44677] )
robot.MoveL(Pose([250, 250, 191.421356, 180, 0, -150]), [-39.75778, -1.04537, -40.37883, 52.09118, 54.15317, -246.94403] )
robot.RunMessage("Setting air valve off")
robot.RunCode("TCP_Off", True)
robot.Pause(1000)
robot.MoveL(Pose([250, 300, 278.023897, 180, 0, -150]), [-37.52588, -6.32628, -34.59693, 53.52525, 49.24426, -251.44677] )
robot.MoveL(Pose([250, 200, 278.023897, 180, 0, -150]), [-41.85389, -1.95619, -34.89154, 57.43912, 52.34162, -253.73403] )
robot.MoveL(Pose([250, 150, 191.421356, 180, 0, -150]), [-43.82111, 3.29703, -40.29493, 56.02402, 56.61169, -249.23532] )
robot.ProgFinish("Program")
# robot.ProgSave(".","Program",True)
print(robot.PROG)
if len(robot.LOG) > 0:
mbox('Program generation LOG:\n\n' + robot.LOG)
input("Press Enter to close...")
if __name__ == "__main__":
"""Function to call when the module is executed by itself: test"""
test_post()
| 40.618391
| 201
| 0.56019
|
0ee485d8950d74bd2e6c6c9513be99e03cec477f
| 2,944
|
py
|
Python
|
gen_wordcloud.py
|
gchers/paper-analyser
|
738783eb1e6f4a3d1ac013dc7da57027ca4b9b9e
|
[
"MIT"
] | 14
|
2020-12-26T22:00:22.000Z
|
2021-11-08T11:32:29.000Z
|
gen_wordcloud.py
|
gchers/paper-analyser
|
738783eb1e6f4a3d1ac013dc7da57027ca4b9b9e
|
[
"MIT"
] | 6
|
2021-03-12T19:36:52.000Z
|
2021-05-18T10:39:06.000Z
|
gen_wordcloud.py
|
gchers/paper-analyser
|
738783eb1e6f4a3d1ac013dc7da57027ca4b9b9e
|
[
"MIT"
] | 2
|
2020-12-26T22:00:17.000Z
|
2021-03-19T12:14:55.000Z
|
import os
import sys
from wordcloud import WordCloud
if len(sys.argv) != 2:
print("usage: python pq_gen_wordcloud.py TEXTFILE")
exit(0)
whole_text = None
#with open("copy-total-words", "r") as i_f:
with open(sys.argv[1], "r") as i_f:
whole_text = i_f.read()
whole_text = whole_text.replace("cid:", "")
#wordcloud = WordCloud().generate(whole_text)
import matplotlib.pyplot as plt
#plt.imshow(wordcloud, interpolation="bilinear")
#plt.axis("off")
wordcloud = WordCloud(background_color="white",
colormap="winter",
font_path="Source_Sans_Pro/SourceSansPro-Bold.ttf",
height=800,
width=800,
min_font_size=10,
max_font_size=120,
prefer_horizontal=3.0).generate(whole_text)
plt.figure(figsize=(10.0, 10.0))
plt.imshow(wordcloud, interpolation="bilinear")
plt.axis("off")
plt.savefig("wordcloud-final.png")
# Now let's do word frequency
word_dict = dict()
split_words = whole_text.split(" ")
total_number_of_words = len(split_words)
print(total_number_of_words)
total_sorted = 0
for w in split_words:
total_sorted += 1
if w in word_dict:
word_dict[w] = word_dict[w] + 1
else:
word_dict[w] = 1
print("Total sorted: %d"%(total_sorted))
listed_word_dict = []
for key,value in word_dict.items():
listed_word_dict.append((key, value, ))
print("Length of listed word %d"%(len(listed_word_dict)))
listed_word_dict.sort(key=lambda x:x[1])
listed_word_dict.reverse()
sorted_list = listed_word_dict
print(sorted_list)
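# The manual tally above can also be written with the standard library's
# collections.Counter; this sketch produces the same (word, count) pairs in
# descending order of count (ties may order differently):
from collections import Counter
counter_sorted = Counter(split_words).most_common()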
# https://www.espressoenglish.net/the-100-most-common-words-in-english/
avoid_words = { "the", "at", "there", "some", "my", "of", "be", "use", "her", "than", "and", "this", "an", "would", "first", "a", "have", "each", "make", "water", "to", "from", "which", "like", "been", "in", "or", "she", "him", "call", "is", "one", "do", "into", "who", "you", "had", "how", "time", "oil", "that", "by", "their", "has", "its", "it", "word", "if", "look", "now", "he", "but", "will", "two", "find", "was", "not", "up", "more", "long", "for", "what", "other", "write", "down", "on", "all", "about", "go", "day", "are", "were", "out", "see", "did", "as", "we", "many", "number", "get", "with", "when", "then", "no", "come", "his", "your", "them", "way", "made", "they", "can", "these", "could", "may", "I", "said", "so", "people", "part" }
avoid_words.add("=")
avoid_words.add(".")
avoid_words.add(" ")
avoid_words.add("")
avoid_words.add(",")
words = []
freqs = []
for key, value in sorted_list:
if key.lower() in avoid_words:
continue
words.append(key)
freqs.append(value)
for i in range(min(140, len(sorted_list))):
print("sorted_list: %s"%(str(sorted_list[i])))
Data = { "words": words, "freqs": freqs }
plt.clf()
plt.figure(figsize=(10.0, 10.0))
plt.barh(words[:50], freqs[:50])
plt.title("Word frequency")
#plt.show()
plt.savefig("barplot.png")
| 33.078652
| 752
| 0.611753
|
9b5291e07b4c739719122f071e9e82eaf7f00019
| 84
|
py
|
Python
|
ufile/__init__.py
|
delexxie/ufile-sdk-python
|
276864fc865b44f44129b8f42016987c3b084bf6
|
[
"MIT"
] | null | null | null |
ufile/__init__.py
|
delexxie/ufile-sdk-python
|
276864fc865b44f44129b8f42016987c3b084bf6
|
[
"MIT"
] | null | null | null |
ufile/__init__.py
|
delexxie/ufile-sdk-python
|
276864fc865b44f44129b8f42016987c3b084bf6
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
UCloud UFile SDK for python
"""
__version__ = '3.0.1'
| 10.5
| 27
| 0.559524
|
35c63460571a5936d11a35fd0445b560d9f2c93e
| 3,550
|
py
|
Python
|
bindings/python/ensmallen/datasets/string/megasphaeraspbv3c161.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 5
|
2021-02-17T00:44:45.000Z
|
2021-08-09T16:41:47.000Z
|
bindings/python/ensmallen/datasets/string/megasphaeraspbv3c161.py
|
AnacletoLAB/ensmallen_graph
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 18
|
2021-01-07T16:47:39.000Z
|
2021-08-12T21:51:32.000Z
|
bindings/python/ensmallen/datasets/string/megasphaeraspbv3c161.py
|
AnacletoLAB/ensmallen
|
b2c1b18fb1e5801712852bcc239f239e03076f09
|
[
"MIT"
] | 3
|
2021-01-14T02:20:59.000Z
|
2021-08-04T19:09:52.000Z
|
"""
This file offers the methods to automatically retrieve the graph Megasphaera sp. BV3C161.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def MegasphaeraSpBv3c161(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
"""Return new instance of the Megasphaera sp. BV3C161 graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False
        Whether to load the graph as directed or undirected.
        By default False.
preprocess: bool = True
Whether to preprocess the graph to be loaded in
optimal time and memory.
    load_nodes: bool = True
        Whether to load the nodes vocabulary or treat the nodes
        simply as a numeric range.
    verbose: int = 2
        Whether to show loading bars during the retrieval and building
        of the graph.
cache: bool = True
Whether to use cache, i.e. download files only once
and preprocess them only once.
    cache_path: str = "graphs/string"
Where to store the downloaded graphs.
version: str = "links.v11.5"
The version of the graph to retrieve.
The available versions are:
- homology.v11.0
- homology.v11.5
- physical.links.v11.0
- physical.links.v11.5
- links.v11.0
- links.v11.5
additional_graph_kwargs: Dict
Additional graph kwargs.
Returns
-----------------------
    Instance of Megasphaera sp. BV3C161 graph.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
return AutomaticallyRetrievedGraph(
graph_name="MegasphaeraSpBv3c161",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
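# A minimal usage sketch (assumes ensmallen is installed and the STRING files
# can be downloaded, or are already cached, under cache_path):
#
#   graph = MegasphaeraSpBv3c161(directed=False, version="links.v11.5")
#   print(graph)  # textual report of the retrieved graph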
| 32.87037
| 223
| 0.676056
|
c44b6e04afadf75dacf2afd754d87484518bceb0
| 1,105
|
py
|
Python
|
translate.py
|
ivan-mihailov/german_pdfs_ocr_project
|
5a3e6122edf9786fefdf5b7458d7f8bb4dbc891f
|
[
"Apache-2.0"
] | null | null | null |
translate.py
|
ivan-mihailov/german_pdfs_ocr_project
|
5a3e6122edf9786fefdf5b7458d7f8bb4dbc891f
|
[
"Apache-2.0"
] | null | null | null |
translate.py
|
ivan-mihailov/german_pdfs_ocr_project
|
5a3e6122edf9786fefdf5b7458d7f8bb4dbc891f
|
[
"Apache-2.0"
] | null | null | null |
# Imports the Google Cloud Translation library
from google.cloud import translate
# Initialize Translation client
def translate_text(text="YOUR_TEXT_TO_TRANSLATE", project_id="YOUR_PROJECT_ID"):
"""Translating Text."""
client = translate.TranslationServiceClient()
location = "global"
parent = f"projects/{project_id}/locations/{location}"
# Translate text from German to English
# Detail on supported types can be found here:
# https://cloud.google.com/translate/docs/supported-formats
response = client.translate_text(
request={
"parent": parent,
"contents": [text],
"mime_type": "text/plain", # mime types: text/plain, text/html
"source_language_code": "de",
"target_language_code": "en-US",
}
)
# Display the translation for each input text provided
for translation in response.translations:
print("Translated text: {}".format(translation.translated_text))
if __name__ == '__main__':
translate_text(text="YOUR_TEXT_TO_TRANSLATE", project_id="YOUR_PROJECT_ID")
| 32.5
| 80
| 0.682353
|
926b2fe44720345d5af340ea396c53fc3fc7dfc4
| 508
|
py
|
Python
|
scripts/carracing/carracing_random.py
|
robertjankowski/ga-openai
|
f5d034459f783aa8f5103189030fca4153a124fd
|
[
"MIT"
] | 28
|
2019-11-02T13:16:03.000Z
|
2022-03-22T12:08:12.000Z
|
scripts/carracing/carracing_random.py
|
robertjankowski/ga-openai
|
f5d034459f783aa8f5103189030fca4153a124fd
|
[
"MIT"
] | 1
|
2019-12-03T06:54:54.000Z
|
2019-12-18T08:20:42.000Z
|
scripts/carracing/carracing_random.py
|
robertjankowski/ga-openai
|
f5d034459f783aa8f5103189030fca4153a124fd
|
[
"MIT"
] | 8
|
2020-04-30T22:12:27.000Z
|
2021-07-07T20:09:53.000Z
|
import gym
import numpy as np
import torch
from nn.conv import ConvNet
if __name__ == '__main__':
env = gym.make('CarRacing-v0')
obs = env.reset()
nn = ConvNet()
for _ in range(100):
env.render()
        # Flip the frame vertically and convert it to a float tensor
        obs = torch.from_numpy(np.flip(obs, axis=0).copy()).float()
        # Arrange into a (batch, channels, height, width) tensor for the ConvNet
        obs = obs.reshape((-1, 3, 96, 96))
action = nn.forward(obs)
action = action.detach().numpy()
obs, reward, done, _ = env.step(action)
if done:
break
env.close()
| 22.086957
| 67
| 0.564961
|
91621b0af2ca24a14310d420af3101c89da24eb5
| 1,449
|
py
|
Python
|
Step_by_step/complex_lig_cof.py
|
lbfederico/gmx_scripts
|
c6c6faee2671c2612f69cc973e330836218692d5
|
[
"MIT"
] | 2
|
2020-12-30T16:42:55.000Z
|
2021-05-22T01:06:59.000Z
|
Step_by_step/complex_lig_cof.py
|
lbfederico/gmx_scripts
|
c6c6faee2671c2612f69cc973e330836218692d5
|
[
"MIT"
] | null | null | null |
Step_by_step/complex_lig_cof.py
|
lbfederico/gmx_scripts
|
c6c6faee2671c2612f69cc973e330836218692d5
|
[
"MIT"
] | 1
|
2021-05-22T01:07:00.000Z
|
2021-05-22T01:07:00.000Z
|
import sys
if len(sys.argv) < 4:
    print('ERROR: Please provide the protein, ligand and cofactor file names.')
else:
prot = sys.argv[1]
lig = sys.argv[2]
cof = sys.argv[3]
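    # For reference, a GROMACS .gro file is laid out as follows (the format
    # this script relies on; a sketch):
    #   line 1        title
    #   line 2        number of atoms
    #   lines 3..N-1  one line per atom
    #   line N        box vectors
    # so slicing [2:-1] keeps only the atom lines.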
    # Open and read protein file
    file = open(str(prot), 'r')
    prot = file.readlines()
    file.close()
    # Build complex, part 1 -> protein atoms without title, count and box vector
    file = open('complex.gro', 'a+')
    only_prot = prot[2:-1]
    file.writelines([item for item in only_prot])
    box_vector = prot[-1]
    file.close()
#Add ligand without title and box vector -> add box vector
#Open and read lig and complex
file = open(str(lig), 'r')
lig = file.readlines()
lig_only = lig[2:-1]
file = open(str(cof), 'r')
cof = file.readlines()
cof_only = cof[2:-1]
file = open('complex.gro', 'a+')
file.writelines([item for item in lig_only])
file.writelines([item for item in cof_only])
file.writelines([item for item in box_vector])
file.close()
#Check number of lines (cont) and line2 - as this one without title the number of molecules is cont -1
file = open('complex.gro', 'r')
line = file.readlines()
cont = len(line)
file.close()
#Replace number of lines and title
file = open('complex.gro', 'w')
file.write('complex' + '\n')
file.write(str(cont -1)+ '\n')
file = open('complex.gro', 'a')
file.writelines(line)
file.close()
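# Example usage (hypothetical file names):
#   python complex_lig_cof.py protein.gro ligand.gro cofactor.gro
# writes complex.gro in the current directory; since the script appends,
# delete any stale complex.gro before running it again.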
| 27.339623
| 107
| 0.591442
|
8787363a08bd7aa02ce5c02fc8bc99ccae6630f1
| 30,223
|
py
|
Python
|
tests/func/test_checkout.py
|
lucasalavapena/dvc
|
230eb7087df7f063ded7422af7ae45bd04eb794a
|
[
"Apache-2.0"
] | null | null | null |
tests/func/test_checkout.py
|
lucasalavapena/dvc
|
230eb7087df7f063ded7422af7ae45bd04eb794a
|
[
"Apache-2.0"
] | 117
|
2021-04-05T10:04:24.000Z
|
2022-03-31T20:21:23.000Z
|
tests/func/test_checkout.py
|
lucasalavapena/dvc
|
230eb7087df7f063ded7422af7ae45bd04eb794a
|
[
"Apache-2.0"
] | null | null | null |
import collections
import filecmp
import logging
import os
import shutil
import stat
import textwrap
from unittest.mock import patch
import pytest
from dvc.dvcfile import DVC_FILE_SUFFIX, PIPELINE_FILE, Dvcfile
from dvc.exceptions import (
CheckoutError,
CheckoutErrorSuggestGit,
ConfirmRemoveError,
DvcException,
NoOutputOrStageError,
)
from dvc.fs.local import LocalFileSystem
from dvc.main import main
from dvc.stage import Stage
from dvc.stage.exceptions import StageFileDoesNotExistError
from dvc.system import System
from dvc.utils import relpath
from dvc.utils.fs import remove, walk_files
from dvc.utils.serialize import dump_yaml, load_yaml
from tests.basic_env import TestDvc, TestDvcGit
from tests.func.test_repro import TestRepro
logger = logging.getLogger("dvc")
class TestCheckout(TestRepro):
def setUp(self):
super().setUp()
stages = self.dvc.add(self.DATA_DIR)
self.assertEqual(len(stages), 1)
self.data_dir_stage = stages[0]
self.assertTrue(self.data_dir_stage is not None)
self.orig = "orig"
shutil.copy(self.FOO, self.orig)
os.unlink(self.FOO)
self.orig_dir = "orig_dir"
shutil.copytree(self.DATA_DIR, self.orig_dir)
shutil.rmtree(self.DATA_DIR)
def test(self):
self.dvc.checkout(force=True)
self._test_checkout()
def _test_checkout(self):
self.assertTrue(os.path.isfile(self.FOO))
self.assertTrue(filecmp.cmp(self.FOO, self.orig, shallow=False))
class TestCheckoutSingleStage(TestCheckout):
def test(self):
ret = main(["checkout", "--force", self.foo_stage.path])
self.assertEqual(ret, 0)
ret = main(["checkout", "--force", self.data_dir_stage.path])
self.assertEqual(ret, 0)
self._test_checkout()
class TestCheckoutCorruptedCacheFile(TestRepro):
def test(self):
cache = self.foo_stage.outs[0].cache_path
os.chmod(cache, 0o644)
with open(cache, "a", encoding="utf-8") as fd:
fd.write("1")
with pytest.raises(CheckoutError):
self.dvc.checkout(force=True)
self.assertFalse(os.path.isfile(self.FOO))
self.assertFalse(os.path.isfile(cache))
class TestCheckoutCorruptedCacheDir(TestDvc):
def test(self):
from dvc.objects import load
# NOTE: using 'copy' so that cache and link don't have same inode
ret = main(["config", "cache.type", "copy"])
self.assertEqual(ret, 0)
self.dvc.config.load()
stages = self.dvc.add(self.DATA_DIR)
self.assertEqual(len(stages), 1)
self.assertEqual(len(stages[0].outs), 1)
out = stages[0].outs[0]
# NOTE: modifying cache file for one of the files inside the directory
# to check if dvc will detect that the cache is corrupted.
obj = load(self.dvc.odb.local, out.hash_info)
_, _, entry_oid = list(obj)[0]
cache = self.dvc.odb.local.hash_to_path(entry_oid.value)
os.chmod(cache, 0o644)
with open(cache, "w+", encoding="utf-8") as fobj:
fobj.write("1")
with pytest.raises(CheckoutError):
self.dvc.checkout(force=True)
self.assertFalse(os.path.exists(cache))
class TestCmdCheckout(TestCheckout):
def test(self):
ret = main(["checkout", "--force"])
self.assertEqual(ret, 0)
self._test_checkout()
class CheckoutBase(TestDvcGit):
GIT_IGNORE = ".gitignore"
def commit_data_file(self, fname, content="random text"):
with open(fname, "w", encoding="utf-8") as fd:
fd.write(content)
stages = self.dvc.add(fname)
self.assertEqual(len(stages), 1)
self.assertTrue(stages[0] is not None)
self.dvc.scm.add([fname + ".dvc", ".gitignore"])
self.dvc.scm.commit("adding " + fname)
def read_ignored(self):
with open(self.GIT_IGNORE, encoding="utf-8") as f:
return [s.strip("\n") for s in f.readlines()]
def outs_info(self, stage):
FileInfo = collections.namedtuple("FileInfo", "path inode")
paths = [
path
for output in stage["outs"]
for path in self.dvc.fs.find(output["path"])
]
return [
FileInfo(path=path, inode=System.inode(path)) for path in paths
]
class TestRemoveFilesWhenCheckout(CheckoutBase):
def test(self):
fname = "file_in_a_branch"
branch_master = "master"
branch_1 = "b1"
self.dvc.scm.add(self.dvc.scm.untracked_files())
self.dvc.scm.commit("add all files")
# add the file into a separate branch
self.dvc.scm.checkout(branch_1, True)
ret = main(["checkout", "--force"])
self.assertEqual(ret, 0)
self.commit_data_file(fname)
# Checkout back in master
self.dvc.scm.checkout(branch_master)
self.assertTrue(os.path.exists(fname))
# Make sure `dvc checkout` removes the file
# self.dvc.checkout()
ret = main(["checkout", "--force"])
self.assertEqual(ret, 0)
self.assertFalse(os.path.exists(fname))
class TestCheckoutCleanWorkingDir(CheckoutBase):
@patch("dvc.prompt.confirm", return_value=True)
def test(self, mock_prompt):
mock_prompt.return_value = True
stages = self.dvc.add(self.DATA_DIR)
stage = stages[0]
working_dir_change = os.path.join(self.DATA_DIR, "not_cached.txt")
with open(working_dir_change, "w", encoding="utf-8") as f:
f.write("not_cached")
ret = main(["checkout", stage.relpath])
self.assertEqual(ret, 0)
self.assertFalse(os.path.exists(working_dir_change))
@patch("dvc.prompt.confirm", return_value=False)
def test_force(self, mock_prompt):
mock_prompt.return_value = False
stages = self.dvc.add(self.DATA_DIR)
self.assertEqual(len(stages), 1)
stage = stages[0]
working_dir_change = os.path.join(self.DATA_DIR, "not_cached.txt")
with open(working_dir_change, "w", encoding="utf-8") as f:
f.write("not_cached")
ret = main(["checkout", stage.relpath])
        self.assertNotEqual(ret, 0)
        mock_prompt.assert_called()
class TestCheckoutSelectiveRemove(CheckoutBase):
def test(self):
# Use copy to test for changes in the inodes
ret = main(["config", "cache.type", "copy"])
self.assertEqual(ret, 0)
ret = main(["add", self.DATA_DIR])
self.assertEqual(0, ret)
stage_path = self.DATA_DIR + DVC_FILE_SUFFIX
stage = load_yaml(stage_path)
staged_files = self.outs_info(stage)
        # Move instead of remove, to keep the inode of staged_files[0].path in
        # use; if we removed the file, the newly checked-out file might end up
        # with the same inode.
shutil.move(staged_files[0].path, "random_name")
ret = main(["checkout", "--force", stage_path])
self.assertEqual(ret, 0)
checkedout_files = self.outs_info(stage)
self.assertEqual(len(staged_files), len(checkedout_files))
self.assertEqual(staged_files[0].path, checkedout_files[0].path)
self.assertNotEqual(staged_files[0].inode, checkedout_files[0].inode)
self.assertEqual(staged_files[1].inode, checkedout_files[1].inode)
class TestGitIgnoreBasic(CheckoutBase):
def test(self):
fname1 = "file_1"
fname2 = "file_2"
fname3 = "file_3"
self.dvc.scm.add(self.dvc.scm.untracked_files())
self.dvc.scm.commit("add all files")
self.assertFalse(os.path.exists(self.GIT_IGNORE))
self.commit_data_file(fname1)
self.commit_data_file(fname2)
self.dvc.run(
single_stage=True,
cmd=f"python {self.CODE} {self.FOO} {fname3}",
deps=[self.CODE, self.FOO],
outs_no_cache=[fname3],
)
self.assertTrue(os.path.exists(self.GIT_IGNORE))
ignored = self.read_ignored()
self.assertEqual(len(ignored), 2)
self.assertIn("/" + fname1, ignored)
self.assertIn("/" + fname2, ignored)
class TestGitIgnoreWhenCheckout(CheckoutBase):
def test(self):
fname_master = "file_in_a_master"
branch_master = "master"
fname_branch = "file_in_a_branch"
branch_1 = "b1"
self.dvc.scm.add(self.dvc.scm.untracked_files())
self.dvc.scm.commit("add all files")
self.commit_data_file(fname_master)
self.dvc.scm.checkout(branch_1, True)
ret = main(["checkout", "--force"])
self.assertEqual(ret, 0)
self.commit_data_file(fname_branch)
self.dvc.scm.checkout(branch_master)
ret = main(["checkout", "--force"])
self.assertEqual(ret, 0)
ignored = self.read_ignored()
self.assertEqual(len(ignored), 1)
self.assertIn("/" + fname_master, ignored)
self.dvc.scm.checkout(branch_1)
ret = main(["checkout", "--force"])
self.assertEqual(ret, 0)
ignored = self.read_ignored()
self.assertIn("/" + fname_branch, ignored)
class TestCheckoutMissingMd5InStageFile(TestRepro):
def test(self):
d = load_yaml(self.file1_stage)
del d[Stage.PARAM_OUTS][0][LocalFileSystem.PARAM_CHECKSUM]
del d[Stage.PARAM_DEPS][0][LocalFileSystem.PARAM_CHECKSUM]
dump_yaml(self.file1_stage, d)
with pytest.raises(CheckoutError):
self.dvc.checkout(force=True)
class TestCheckoutEmptyDir(TestDvc):
def test(self):
dname = "empty_dir"
os.mkdir(dname)
stages = self.dvc.add(dname)
self.assertEqual(len(stages), 1)
stage = stages[0]
self.assertTrue(stage is not None)
self.assertEqual(len(stage.outs), 1)
stage.outs[0].remove()
self.assertFalse(os.path.exists(dname))
stats = self.dvc.checkout(force=True)
assert stats["added"] == [dname + os.sep]
self.assertTrue(os.path.isdir(dname))
self.assertEqual(len(os.listdir(dname)), 0)
class TestCheckoutNotCachedFile(TestDvc):
def test(self):
cmd = "python {} {} {}".format(self.CODE, self.FOO, "out")
self.dvc.add(self.FOO)
stage = self.dvc.run(
cmd=cmd,
deps=[self.FOO, self.CODE],
outs_no_cache=["out"],
single_stage=True,
)
self.assertTrue(stage is not None)
stats = self.dvc.checkout(force=True)
assert not any(stats.values())
class TestCheckoutWithDeps(TestRepro):
def test(self):
os.unlink(self.FOO)
os.unlink(self.file1)
self.assertFalse(os.path.exists(self.FOO))
self.assertFalse(os.path.exists(self.file1))
ret = main(["checkout", "--force", self.file1_stage, "--with-deps"])
self.assertEqual(ret, 0)
self.assertTrue(os.path.exists(self.FOO))
self.assertTrue(os.path.exists(self.file1))
class TestCheckoutDirectory(TestRepro):
def test(self):
stage = self.dvc.add(self.DATA_DIR)[0]
shutil.rmtree(self.DATA_DIR)
self.assertFalse(os.path.exists(self.DATA_DIR))
ret = main(["checkout", stage.path])
self.assertEqual(ret, 0)
self.assertTrue(os.path.exists(self.DATA_DIR))
class TestCheckoutHook(TestDvc):
@patch("sys.stdout.isatty", return_value=True)
@patch("dvc.prompt.input", side_effect=EOFError)
def test(self, _mock_input, _mock_isatty):
"""Test that dvc checkout handles EOFError gracefully, which is what
it will experience when running in a git hook.
"""
stages = self.dvc.add(self.DATA_DIR)
self.assertEqual(len(stages), 1)
stage = stages[0]
self.assertNotEqual(stage, None)
self.create(os.path.join(self.DATA_DIR, "test"), "test")
with self.assertRaises(ConfirmRemoveError):
self.dvc.checkout()
class TestCheckoutSuggestGit(TestRepro):
def test(self):
# pylint: disable=no-member
try:
self.dvc.checkout(targets="gitbranch")
except DvcException as exc:
self.assertIsInstance(exc, CheckoutErrorSuggestGit)
self.assertIsInstance(exc.__cause__, NoOutputOrStageError)
self.assertIsInstance(
exc.__cause__.__cause__, StageFileDoesNotExistError
)
try:
self.dvc.checkout(targets=self.FOO)
except DvcException as exc:
self.assertIsInstance(exc, CheckoutErrorSuggestGit)
self.assertIsInstance(exc.__cause__, NoOutputOrStageError)
self.assertIsNone(exc.__cause__.__cause__)
try:
self.dvc.checkout(targets="looks-like-dvcfile.dvc")
except DvcException as exc:
self.assertIsInstance(exc, CheckoutErrorSuggestGit)
self.assertIsInstance(exc.__cause__, StageFileDoesNotExistError)
self.assertIsNone(exc.__cause__.__cause__)
class TestCheckoutTargetRecursiveShouldNotRemoveOtherUsedFiles(TestDvc):
def test(self):
ret = main(["add", self.DATA_DIR, self.FOO, self.BAR])
self.assertEqual(0, ret)
ret = main(["checkout", "-R", self.DATA_DIR])
self.assertEqual(0, ret)
self.assertTrue(os.path.exists(self.FOO))
self.assertTrue(os.path.exists(self.BAR))
class TestCheckoutRecursiveNotDirectory(TestDvc):
def test(self):
ret = main(["add", self.FOO])
self.assertEqual(0, ret)
stats = self.dvc.checkout(targets=[self.FOO + ".dvc"], recursive=True)
assert stats == {"added": [], "modified": [], "deleted": []}
class TestCheckoutMovedCacheDirWithSymlinks(TestDvc):
def test(self):
ret = main(["config", "cache.type", "symlink"])
self.assertEqual(ret, 0)
ret = main(["add", self.FOO])
self.assertEqual(ret, 0)
ret = main(["add", self.DATA_DIR])
self.assertEqual(ret, 0)
self.assertTrue(System.is_symlink(self.FOO))
old_foo_link = os.path.realpath(self.FOO)
self.assertTrue(System.is_symlink(self.DATA))
old_data_link = os.path.realpath(self.DATA)
old_cache_dir = self.dvc.odb.local.cache_dir
new_cache_dir = old_cache_dir + "_new"
os.rename(old_cache_dir, new_cache_dir)
ret = main(["cache", "dir", new_cache_dir])
self.assertEqual(ret, 0)
ret = main(["checkout", "-f"])
self.assertEqual(ret, 0)
self.assertTrue(System.is_symlink(self.FOO))
new_foo_link = os.path.realpath(self.FOO)
self.assertTrue(System.is_symlink(self.DATA))
new_data_link = os.path.realpath(self.DATA)
self.assertEqual(
relpath(old_foo_link, old_cache_dir),
relpath(new_foo_link, new_cache_dir),
)
self.assertEqual(
relpath(old_data_link, old_cache_dir),
relpath(new_data_link, new_cache_dir),
)
def test_checkout_no_checksum(tmp_dir, dvc):
tmp_dir.gen("file", "file content")
stage = dvc.run(
outs=["file"], no_exec=True, cmd="somecmd", single_stage=True
)
with pytest.raises(CheckoutError):
dvc.checkout([stage.path], force=True)
assert not os.path.exists("file")
@pytest.mark.parametrize(
"link, link_test_func",
[("hardlink", System.is_hardlink), ("symlink", System.is_symlink)],
)
def test_checkout_relink(tmp_dir, dvc, link, link_test_func):
dvc.odb.local.cache_types = [link]
tmp_dir.dvc_gen({"dir": {"data": "text"}})
dvc.unprotect("dir/data")
assert not link_test_func("dir/data")
stats = dvc.checkout(["dir.dvc"], relink=True)
assert stats == empty_checkout
assert link_test_func("dir/data")
@pytest.mark.parametrize("link", ["hardlink", "symlink", "copy"])
def test_checkout_relink_protected(tmp_dir, dvc, link):
dvc.odb.local.cache_types = [link]
tmp_dir.dvc_gen("foo", "foo")
dvc.unprotect("foo")
assert os.access("foo", os.W_OK)
stats = dvc.checkout(["foo.dvc"], relink=True)
assert stats == empty_checkout
# NOTE: Windows symlink perms don't propagate to the target
if link == "copy" or (link == "symlink" and os.name == "nt"):
assert os.access("foo", os.W_OK)
else:
assert not os.access("foo", os.W_OK)
@pytest.mark.parametrize(
"target",
[os.path.join("dir", "subdir"), os.path.join("dir", "subdir", "file")],
)
def test_partial_checkout(tmp_dir, dvc, target):
tmp_dir.dvc_gen({"dir": {"subdir": {"file": "file"}, "other": "other"}})
shutil.rmtree("dir")
stats = dvc.checkout([target])
assert stats["added"] == ["dir" + os.sep]
assert list(walk_files("dir")) == [os.path.join("dir", "subdir", "file")]
empty_checkout = {"added": [], "deleted": [], "modified": []}
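# NOTE: empty_checkout is defined mid-module but looked up at call time, so
# the tests defined above this line can reference it safely.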
def test_stats_on_empty_checkout(tmp_dir, dvc, scm):
assert dvc.checkout() == empty_checkout
tmp_dir.dvc_gen(
{"dir": {"subdir": {"file": "file"}, "other": "other"}},
commit="initial",
)
assert dvc.checkout() == empty_checkout
def test_stats_on_checkout(tmp_dir, dvc, scm):
tmp_dir.dvc_gen(
{
"dir": {"subdir": {"file": "file"}, "other": "other"},
"foo": "foo",
"bar": "bar",
},
commit="initial",
)
scm.checkout("HEAD~")
stats = dvc.checkout()
assert set(stats["deleted"]) == {"dir" + os.sep, "foo", "bar"}
scm.checkout("-")
stats = dvc.checkout()
assert set(stats["added"]) == {"bar", "dir" + os.sep, "foo"}
tmp_dir.gen({"lorem": "lorem", "bar": "new bar", "dir2": {"file": "file"}})
(tmp_dir / "foo").unlink()
scm.gitpython.repo.git.rm("foo.dvc")
tmp_dir.dvc_add(["bar", "lorem", "dir2"], commit="second")
scm.checkout("HEAD~")
stats = dvc.checkout()
assert set(stats["modified"]) == {"bar"}
assert set(stats["added"]) == {"foo"}
assert set(stats["deleted"]) == {"lorem", "dir2" + os.sep}
scm.checkout("-")
stats = dvc.checkout()
assert set(stats["modified"]) == {"bar"}
assert set(stats["added"]) == {"dir2" + os.sep, "lorem"}
assert set(stats["deleted"]) == {"foo"}
@pytest.mark.xfail(reason="values relpath")
def test_checkout_stats_on_failure(tmp_dir, dvc, scm):
tmp_dir.dvc_gen(
{"foo": "foo", "dir": {"subdir": {"file": "file"}}, "other": "other"},
commit="initial",
)
stage = Dvcfile(dvc, "foo.dvc").stage
tmp_dir.dvc_gen({"foo": "foobar", "other": "other other"}, commit="second")
# corrupt cache
cache = stage.outs[0].cache_path
os.chmod(cache, 0o644)
with open(cache, "a", encoding="utf-8") as fd:
fd.write("destroy cache")
scm.checkout("HEAD~")
with pytest.raises(CheckoutError) as exc:
dvc.checkout(force=True)
assert exc.value.stats == {
**empty_checkout,
"failed": ["foo"],
"modified": ["other"],
}
def test_stats_on_added_file_from_tracked_dir(tmp_dir, dvc, scm):
tmp_dir.dvc_gen(
{"dir": {"subdir": {"file": "file"}, "other": "other"}},
commit="initial",
)
tmp_dir.gen("dir/subdir/newfile", "newfile")
tmp_dir.dvc_add("dir", commit="add newfile")
scm.checkout("HEAD~")
assert dvc.checkout() == {**empty_checkout, "modified": ["dir" + os.sep]}
assert dvc.checkout() == empty_checkout
scm.checkout("-")
assert dvc.checkout() == {**empty_checkout, "modified": ["dir" + os.sep]}
assert dvc.checkout() == empty_checkout
def test_stats_on_updated_file_from_tracked_dir(tmp_dir, dvc, scm):
tmp_dir.dvc_gen(
{"dir": {"subdir": {"file": "file"}, "other": "other"}},
commit="initial",
)
tmp_dir.gen("dir/subdir/file", "what file?")
tmp_dir.dvc_add("dir", commit="update file")
scm.checkout("HEAD~")
assert dvc.checkout() == {**empty_checkout, "modified": ["dir" + os.sep]}
assert dvc.checkout() == empty_checkout
scm.checkout("-")
assert dvc.checkout() == {**empty_checkout, "modified": ["dir" + os.sep]}
assert dvc.checkout() == empty_checkout
def test_stats_on_removed_file_from_tracked_dir(tmp_dir, dvc, scm):
tmp_dir.dvc_gen(
{"dir": {"subdir": {"file": "file"}, "other": "other"}},
commit="initial",
)
(tmp_dir / "dir" / "subdir" / "file").unlink()
tmp_dir.dvc_add("dir", commit="removed file from subdir")
scm.checkout("HEAD~")
assert dvc.checkout() == {**empty_checkout, "modified": ["dir" + os.sep]}
assert dvc.checkout() == empty_checkout
scm.checkout("-")
assert dvc.checkout() == {**empty_checkout, "modified": ["dir" + os.sep]}
assert dvc.checkout() == empty_checkout
def test_stats_on_show_changes_does_not_show_summary(
tmp_dir, dvc, scm, capsys
):
tmp_dir.dvc_gen(
{"dir": {"subdir": {"file": "file"}}, "other": "other"},
commit="initial",
)
scm.checkout("HEAD~")
assert main(["checkout"]) == 0
out, _ = capsys.readouterr()
assert out.splitlines() == [
f"D\tdir{os.sep}".expandtabs(),
"D\tother".expandtabs(),
]
def test_stats_does_not_show_changes_by_default(tmp_dir, dvc, scm, capsys):
tmp_dir.dvc_gen(
{"dir": {"subdir": {"file": "file"}}, "other": "other"},
commit="initial",
)
scm.checkout("HEAD~")
assert main(["checkout", "--summary"]) == 0
out, _ = capsys.readouterr()
assert "2 files deleted" == out.rstrip()
@pytest.mark.parametrize("link", ["hardlink", "symlink", "copy"])
def test_checkout_with_relink_existing(tmp_dir, dvc, link):
tmp_dir.dvc_gen("foo", "foo")
(tmp_dir / "foo").unlink()
tmp_dir.dvc_gen("bar", "bar")
dvc.odb.local.cache_types = [link]
stats = dvc.checkout(relink=True)
assert stats == {**empty_checkout, "added": ["foo"]}
def test_checkout_with_deps(tmp_dir, dvc):
tmp_dir.dvc_gen({"foo": "foo"})
dvc.run(
fname="copy_file.dvc",
cmd="echo foo > bar",
outs=["bar"],
deps=["foo"],
single_stage=True,
)
(tmp_dir / "bar").unlink()
(tmp_dir / "foo").unlink()
stats = dvc.checkout(["copy_file.dvc"], with_deps=False)
assert stats == {**empty_checkout, "added": ["bar"]}
(tmp_dir / "bar").unlink()
stats = dvc.checkout(["copy_file.dvc"], with_deps=True)
assert set(stats["added"]) == {"foo", "bar"}
def test_checkout_recursive(tmp_dir, dvc):
tmp_dir.gen({"dir": {"foo": "foo", "bar": "bar"}})
dvc.add("dir", recursive=True)
(tmp_dir / "dir" / "foo").unlink()
(tmp_dir / "dir" / "bar").unlink()
stats = dvc.checkout(["dir"], recursive=True)
assert set(stats["added"]) == {
os.path.join("dir", "foo"),
os.path.join("dir", "bar"),
}
@pytest.mark.parametrize(
"workspace", [pytest.lazy_fixture("s3")], indirect=True
)
def test_checkout_for_external_outputs(tmp_dir, dvc, workspace):
workspace.gen("foo", "foo")
file_path = workspace / "foo"
dvc.add("remote://workspace/foo")
odb = dvc.cloud.get_remote_odb("workspace")
odb.fs.remove(str(file_path))
assert not file_path.exists()
stats = dvc.checkout(force=True)
assert stats == {**empty_checkout, "added": ["remote://workspace/foo"]}
assert file_path.exists()
workspace.gen("foo", "foo\nfoo")
stats = dvc.checkout(force=True)
assert stats == {**empty_checkout, "modified": ["remote://workspace/foo"]}
assert file_path.read_text() == "foo"
def test_checkouts_with_different_addressing(tmp_dir, dvc, run_copy):
tmp_dir.gen({"foo": "foo", "lorem": "lorem"})
run_copy("foo", "bar", name="copy-foo-bar")
run_copy("lorem", "ipsum", name="copy-lorem-ipsum")
(tmp_dir / "bar").unlink()
(tmp_dir / "ipsum").unlink()
assert set(dvc.checkout(PIPELINE_FILE)["added"]) == {"bar", "ipsum"}
(tmp_dir / "bar").unlink()
(tmp_dir / "ipsum").unlink()
assert set(dvc.checkout(":")["added"]) == {"bar", "ipsum"}
(tmp_dir / "bar").unlink()
assert dvc.checkout("copy-foo-bar")["added"] == ["bar"]
(tmp_dir / "bar").unlink()
assert dvc.checkout("dvc.yaml:copy-foo-bar")["added"] == ["bar"]
(tmp_dir / "bar").unlink()
assert dvc.checkout(":copy-foo-bar")["added"] == ["bar"]
(tmp_dir / "bar").unlink()
(tmp_dir / "data").mkdir()
with (tmp_dir / "data").chdir():
assert dvc.checkout(relpath(tmp_dir / "dvc.yaml") + ":copy-foo-bar")[
"added"
] == [relpath(tmp_dir / "bar")]
(tmp_dir / "bar").unlink()
assert dvc.checkout("bar")["added"] == ["bar"]
def test_checkouts_on_same_stage_name_and_output_name(tmp_dir, dvc, run_copy):
tmp_dir.gen("foo", "foo")
run_copy("foo", "bar", name="copy-foo-bar")
run_copy("foo", "copy-foo-bar", name="make_collision")
(tmp_dir / "bar").unlink()
(tmp_dir / "copy-foo-bar").unlink()
assert dvc.checkout("copy-foo-bar")["added"] == ["bar"]
assert dvc.checkout("./copy-foo-bar")["added"] == ["copy-foo-bar"]
def test_checkouts_for_pipeline_tracked_outs(tmp_dir, dvc, scm, run_copy):
tmp_dir.gen("foo", "foo")
stage1 = run_copy("foo", "bar", name="copy-foo-bar")
tmp_dir.gen("lorem", "lorem")
stage2 = run_copy("lorem", "ipsum", name="copy-lorem-ipsum")
for out in ["bar", "ipsum"]:
(tmp_dir / out).unlink()
assert dvc.checkout(["bar"])["added"] == ["bar"]
(tmp_dir / "bar").unlink()
assert set(dvc.checkout([PIPELINE_FILE])["added"]) == {"bar", "ipsum"}
for out in ["bar", "ipsum"]:
(tmp_dir / out).unlink()
assert set(dvc.checkout([stage1.addressing])["added"]) == {"bar"}
(tmp_dir / "bar").unlink()
assert set(dvc.checkout([stage2.addressing])["added"]) == {"ipsum"}
(tmp_dir / "ipsum").unlink()
assert set(dvc.checkout()["added"]) == {"bar", "ipsum"}
@pytest.mark.parametrize(
"workspace", [pytest.lazy_fixture("s3")], indirect=True
)
def test_checkout_external_modified_file(tmp_dir, dvc, scm, mocker, workspace):
    # Regression test: when a file in an external output changed and checkout
    # was attempted without force, dvc checks whether the file is present in
    # its cache before asking the user to remove it.
workspace.gen("foo", "foo")
dvc.add("remote://workspace/foo", external=True)
scm.add(["foo.dvc"])
scm.commit("add foo")
workspace.gen("foo", "foobar") # messing up the external outputs
mocker.patch("dvc.prompt.confirm", return_value=True)
dvc.checkout()
assert (workspace / "foo").read_text() == "foo"
def test_checkout_executable(tmp_dir, dvc):
tmp_dir.dvc_gen("foo", "foo")
contents = (tmp_dir / "foo.dvc").parse()
contents["outs"][0]["isexec"] = True
(tmp_dir / "foo.dvc").dump(contents)
dvc.checkout("foo")
isexec = os.stat("foo").st_mode & stat.S_IEXEC
if os.name == "nt":
# NOTE: you can't set exec bits on Windows
assert not isexec
else:
assert isexec
def test_checkout_partial(tmp_dir, dvc):
tmp_dir.dvc_gen(
{"data": {"foo": "foo", "bar": "bar", "sub_dir": {"baz": "baz"}}}
)
data_dir = tmp_dir / "data"
shutil.rmtree(data_dir)
dvc.checkout(str(data_dir / "foo"))
assert data_dir.read_text() == {"foo": "foo"}
dvc.checkout(str(data_dir / "sub_dir" / "baz"))
assert data_dir.read_text() == {"foo": "foo", "sub_dir": {"baz": "baz"}}
dvc.checkout(str(data_dir / "bar"))
assert data_dir.read_text() == {
"foo": "foo",
"bar": "bar",
"sub_dir": {"baz": "baz"},
}
def test_checkout_partial_unchanged(tmp_dir, dvc):
original_dir_shape = {
"foo": "foo",
"bar": "bar",
"sub_dir": {"baz": "baz"},
"empty_sub_dir": {},
}
tmp_dir.dvc_gen({"data": original_dir_shape})
data_dir = tmp_dir / "data"
sub_dir = data_dir / "sub_dir"
foo = data_dir / "foo"
bar = data_dir / "bar"
sub_dir_file = sub_dir / "baz"
# Nothing changed, nothing added/deleted/modified
stats = dvc.checkout(str(bar))
assert not any(stats.values())
# Irrelevant file changed, still nothing added/deleted/modified
foo.unlink()
stats = dvc.checkout(str(bar))
assert not any(stats.values())
# Relevant change, one modified
bar.unlink()
stats = dvc.checkout(str(bar))
assert len(stats["modified"]) == 1
# No changes inside data/sub
stats = dvc.checkout(str(sub_dir))
assert not any(stats.values())
# Relevant change, one modified
sub_dir_file.unlink()
stats = dvc.checkout(str(sub_dir))
assert len(stats["modified"]) == 1
stats = dvc.checkout(str(data_dir / "empty_sub_dir"))
assert not any(stats.values())
dvc.checkout(str(data_dir))
# Everything is in place, no action taken
stats = dvc.checkout(str(data_dir))
assert not any(stats.values())
def test_checkout_partial_subdir(tmp_dir, dvc):
tmp_dir.dvc_gen(
{"data": {"foo": "foo", "sub_dir": {"bar": "bar", "baz": "baz"}}}
)
data_dir = tmp_dir / "data"
sub_dir = data_dir / "sub_dir"
    sub_dir_baz = sub_dir / "baz"
shutil.rmtree(sub_dir)
dvc.checkout(str(sub_dir))
assert data_dir.read_text() == {
"foo": "foo",
"sub_dir": {"bar": "bar", "baz": "baz"},
}
    sub_dir_baz.unlink()
    dvc.checkout(str(sub_dir_baz))
assert data_dir.read_text() == {
"foo": "foo",
"sub_dir": {"bar": "bar", "baz": "baz"},
}
def test_checkout_file(tmp_dir, dvc):
tmp_dir.dvc_gen("foo", "foo")
stats = dvc.checkout("foo")
assert not any(stats.values())
os.unlink("foo")
stats = dvc.checkout("foo")
assert stats["added"] == ["foo"]
def test_checkout_dir_compat(tmp_dir, dvc):
(stage,) = tmp_dir.dvc_gen({"data": {"foo": "foo"}})
tmp_dir.gen(
"data.dvc",
textwrap.dedent(
f"""\
outs:
- md5: {stage.outs[0].hash_info.value}
path: data
"""
),
)
remove("data")
dvc.checkout()
assert (tmp_dir / "data").read_text() == {"foo": "foo"}
| 30.132602
| 79
| 0.616021
|
3062037ee633edfe608f19b212ba3c925be38888
| 4,829
|
py
|
Python
|
yardstick/benchmark/scenarios/availability/monitor/basemonitor.py
|
rbbratta/yardstick
|
7d1ffcd8eb41e9e4a09ed469b00ae4cbf2715529
|
[
"Apache-2.0"
] | null | null | null |
yardstick/benchmark/scenarios/availability/monitor/basemonitor.py
|
rbbratta/yardstick
|
7d1ffcd8eb41e9e4a09ed469b00ae4cbf2715529
|
[
"Apache-2.0"
] | null | null | null |
yardstick/benchmark/scenarios/availability/monitor/basemonitor.py
|
rbbratta/yardstick
|
7d1ffcd8eb41e9e4a09ed469b00ae4cbf2715529
|
[
"Apache-2.0"
] | null | null | null |
##############################################################################
# Copyright (c) 2015 Huawei Technologies Co.,Ltd. and others
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
from __future__ import absolute_import
import pkg_resources
import logging
import multiprocessing
import time
import os
import yardstick.common.utils as utils
import yaml
LOG = logging.getLogger(__name__)
monitor_conf_path = pkg_resources.resource_filename(
"yardstick.benchmark.scenarios.availability",
"monitor_conf.yaml")
class MonitorMgr(object):
    """Manage the monitor instances used by an availability scenario."""
def __init__(self):
self._monitor_list = []
def init_monitors(self, monitor_cfgs, context):
LOG.debug("monitorMgr config: %s", monitor_cfgs)
for monitor_cfg in monitor_cfgs:
monitor_type = monitor_cfg["monitor_type"]
monitor_cls = BaseMonitor.get_monitor_cls(monitor_type)
monitor_ins = monitor_cls(monitor_cfg, context)
if "key" in monitor_cfg:
monitor_ins.key = monitor_cfg["key"]
self._monitor_list.append(monitor_ins)
def __getitem__(self, item):
for obj in self._monitor_list:
if obj.key == item:
return obj
raise KeyError("No such monitor instance of key - %s" % item)
    def start_monitors(self):
        for _monitor_instance in self._monitor_list:
            _monitor_instance.start_monitor()
def wait_monitors(self):
for monitor in self._monitor_list:
monitor.wait_monitor()
    def verify_SLA(self):
        sla_pass = True
        for monitor in self._monitor_list:
            # '&' rather than 'and', so every monitor's SLA is evaluated
            # instead of short-circuiting after the first failure
            sla_pass = sla_pass & monitor.verify_SLA()
        return sla_pass
class BaseMonitor(multiprocessing.Process):
    """Base class for availability monitors; subclasses set __monitor_type__
    and implement setup() and monitor_func()."""
    monitor_cfgs = {}

    def __init__(self, config, context):
        if not BaseMonitor.monitor_cfgs:
            with open(monitor_conf_path) as stream:
                # safe_load avoids executing arbitrary YAML tags from the file
                BaseMonitor.monitor_cfgs = yaml.safe_load(stream)
multiprocessing.Process.__init__(self)
self._config = config
self._context = context
self._queue = multiprocessing.Queue()
self._event = multiprocessing.Event()
self.setup_done = False
@staticmethod
def get_monitor_cls(monitor_type):
"""return monitor class of specified type"""
for monitor in utils.itersubclasses(BaseMonitor):
if monitor_type == monitor.__monitor_type__:
return monitor
raise RuntimeError("No such monitor_type %s" % monitor_type)
def get_script_fullpath(self, path):
base_path = os.path.dirname(monitor_conf_path)
return os.path.join(base_path, path)
def run(self):
LOG.debug("config:%s context:%s", self._config, self._context)
self.setup()
monitor_time = self._config.get("monitor_time", 0)
total_time = 0
outage_time = 0
total_count = 0
outage_count = 0
first_outage = 0
last_outage = 0
begin_time = time.time()
while True:
total_count = total_count + 1
one_check_begin_time = time.time()
exit_status = self.monitor_func()
one_check_end_time = time.time()
if exit_status is False:
outage_count = outage_count + 1
outage_time = outage_time + (
one_check_end_time - one_check_begin_time)
if not first_outage:
first_outage = one_check_begin_time
last_outage = one_check_end_time
if self._event.is_set():
LOG.debug("the monitor process stop")
break
if one_check_end_time - begin_time > monitor_time:
LOG.debug("the monitor max_time finished and exit!")
break
end_time = time.time()
total_time = end_time - begin_time
self._queue.put({"total_time": total_time,
"outage_time": last_outage - first_outage,
"total_count": total_count,
"outage_count": outage_count})
def start_monitor(self):
self.start()
def wait_monitor(self):
self.join()
self._result = self._queue.get()
LOG.debug("the monitor result:%s", self._result)
def setup(self): # pragma: no cover
pass
def monitor_func(self): # pragma: no cover
pass
def verify_SLA(self):
pass
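# A minimal sketch (not part of the original module) of how a concrete monitor
# plugs into BaseMonitor.get_monitor_cls(); the monitor type name and the
# command being checked are illustrative assumptions.
class ExampleProcessMonitor(BaseMonitor):
    __monitor_type__ = "example-process"

    def setup(self):
        self.setup_done = True

    def monitor_func(self):
        # returning False records this check interval as an outage
        return os.system("pidof some_service > /dev/null") == 0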
| 31.154839
| 78
| 0.605301
|
157b614e27a552f4b78c7e5d56f6723524fc3d0d
| 12,441
|
py
|
Python
|
Install/AppData/Roaming/Microsoft/AddIns/Perfect_Lecture/pdf2mp4_size_spec.py
|
kennywei815/Perfect_Lecture
|
7f7cf6a06f4dcf86585010584cdf4c6825b94335
|
[
"Apache-2.0"
] | 1
|
2021-08-16T02:51:26.000Z
|
2021-08-16T02:51:26.000Z
|
Source/pdf2mp4_size_spec.py
|
kennywei815/Perfect_Lecture
|
7f7cf6a06f4dcf86585010584cdf4c6825b94335
|
[
"Apache-2.0"
] | null | null | null |
Source/pdf2mp4_size_spec.py
|
kennywei815/Perfect_Lecture
|
7f7cf6a06f4dcf86585010584cdf4c6825b94335
|
[
"Apache-2.0"
] | 1
|
2021-08-16T02:51:27.000Z
|
2021-08-16T02:51:27.000Z
|
import sys
import os
import shutil
import time
import xml.etree.ElementTree as et
import logging
# Logging
logging.basicConfig(stream=sys.stderr, level=logging.INFO) # RELEASE
# logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) # DEBUG
def pause_exit():
## DEBUG
os.system('pause') #DEBUG
exit()
def quotedStr(str_in):
return '"' + str_in + '"'
# Program Setup
print("Executing pdf2mp4.py...")
workDir = os.path.dirname(os.path.realpath(__file__))
codecDir = os.path.join(workDir, 'ImageMagick-portable') #PATH
ttsDir = os.path.join(workDir, 'TTS_engine') #PATH
tmpDir = 'C:\\Temp' #PATH
if not os.path.exists(tmpDir):
os.makedirs(tmpDir)
# Get work dir & filename of PDF file
# [TODO] check sys.argv[1] isn't blank
script_file = sys.argv[1] + '.script.xml'
pdf = sys.argv[1] + '.pdf'
pdfRoot = os.path.basename(sys.argv[1])
pdfDir = os.path.dirname(pdf)
sizeSpec = sys.argv[2]
#audio = os.path.join(tmpDir, pdfRoot + '.wav')
audio = os.path.join(tmpDir, 'source.wav')
image = os.path.join(tmpDir, pdfRoot + '.jpeg')
video = os.path.join(tmpDir, pdfRoot + '.mp4')
audio_specifier = os.path.join(tmpDir, pdfRoot + '-{}.wav')
image_specifier = os.path.join(tmpDir, pdfRoot + '-{}.jpeg')
video_specifier = os.path.join(tmpDir, pdfRoot + '-{}.mp4')
mp4list = os.path.join(tmpDir, pdfRoot + '.mp4list.txt')
text_specifier = os.path.join(tmpDir, pdfRoot + '-{}.xml')
numPage = 0
pageAudio = []
frameRate = []
default_frameRate = 1/5 # default_frameRate = 1/5 frames per second = 12 frames per minute
MAX_FRAME_RATE = 10**5 # [TODO]: adjust
# Step1: Parse Perfect Lecture Script & Run TTS
post_process_script = os.path.join(tmpDir, 'post_process.iscript') #PATH
with open(post_process_script, 'w', encoding = 'UTF-8') as post_process_script_file:
tree = et.ElementTree(file=script_file)
root = tree.getroot()
for page in root:
logging.debug('%s %s %s %s', page.tag, page.attrib, page.text, page.tail) #DEBUG
# for grand_child in page:
# logging.debug('%s %s %s %s', grand_child.tag, grand_child.attrib, grand_child.text, grand_child.tail) #DEBUG
pageNum_source = int(page.attrib['index'])
pageNum = pageNum_source
print('--------------------------------------------------------------------------------------')
print('Processing page {}...'.format(pageNum_source))
pageAudio.append(None)
frameRate.append(default_frameRate)
#[DONE]: more than 1 script section?
script_text = ''
for script in page.findall('script'):
script_text += script.text
logging.debug('%s %s %s %s', script.tag, script.attrib, script.text, script.tail) #DEBUG
if script_text != '':
#parse script
logging.debug('%s', 'Enter if script_text') #DEBUG
#PATH
tts_text = os.path.join(tmpDir, 'source.xml') #PATH
tts_audio = os.path.join(tmpDir, 'source.wav') #PATH
cur_tts_text = text_specifier.format(numPage)
cur_audio = audio_specifier.format(numPage)
#cur_audio = audio
pageAudio[-1] = cur_audio
# [TODO]: parse Perfect Lecture Script from script.text
# implement C-style multi-line comments: /* ... */
in_js_comment = False
processed_text = ''
remain_text = script_text
while remain_text != '':
if in_js_comment:
if '*/' in remain_text:
split_text = remain_text.split('*/', maxsplit=1)
remain_text = split_text[-1]
in_js_comment = False
else:
                        logging.error(' [ERROR] Found a comment starting with a dangling /*: the corresponding */ was not found!')
logging.error(' [NOTE] Script after removing /* ... */ comments: \n%s', processed_text+remain_text)
pause_exit()
else:
if '/*' in remain_text:
split_text = remain_text.split('/*', maxsplit=1)
processed_text += split_text[0]
remain_text = split_text[-1]
in_js_comment = True
else:
processed_text += remain_text
remain_text = ''
break
if '*/' in processed_text:
                logging.error(' [ERROR] Found a comment ending with a dangling */: the corresponding /* was not found!')
logging.error(' [NOTE] Script after removing /* ... */ comments: \n%s', processed_text)
pause_exit()
logging.debug('%s', processed_text) #DEBUG
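            # Illustrative example (assumed input): the script text
            #     Say hello /* internal note */ world
            # becomes 'Say hello  world' after the /* ... */ stripping above.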
needTTS = False
with open(cur_tts_text, 'w', encoding = 'UTF-8') as tts_text_file:
for line in processed_text.splitlines():
processed_line = line
# implement XML-style comments: <!-- ... --> by et.ElementTree
# implement C-style single line comments: // ...
processed_line = processed_line.split('//', maxsplit=1) [0].strip().replace('“', '"').replace('”', '"').replace('‘', "'").replace("’", "'").replace("`", "'") #TODO: hot fix (ad-hoc)
# logging.debug(' processed_line = \'%s\'', processed_line) #DEBUG
# cmd_opt = processed_line.split()
# logging.debug(' cmd_opt = \'%s\'', str(cmd_opt)) #DEBUG
# [TODO]: CHECK IT (BEGIN)
# [DONE]: write customized tokenizer: deal with other white space characters (e.g. \t) enclosed by """..."""
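                    # Illustrative example (assumed behaviour): the line
                    #     Say """Hello,   world""" 3
                    # tokenizes to ['Say', 'Hello,   world', '3'], preserving
                    # the whitespace inside the triple-quoted field.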
logging.debug(' processed_line = \'%s\'', processed_line) #DEBUG
cmd_opt = []
remain_text = processed_line
InField = False
MergeWithPrev = False
field = ''
while remain_text != '':
if InField:
try:
pos = remain_text.index('"""')
except:
cmd_opt.append(field + remain_text)
remain_text = ''
logging.debug('Infield field = \'%s\'', field) #DEBUG
logging.debug('Infield remain_text = \'%s\'', remain_text) #DEBUG
else:
field += remain_text[:pos] # Remove """
cmd_opt.append(field)
remain_text = remain_text[pos+3:]
logging.debug('Infield field = \'%s\'', field) #DEBUG
logging.debug('Infield remain_text = \'%s\'', remain_text) #DEBUG
field = ''
InField = False
else:
try:
pos = remain_text.index('"""')
except:
cmd_opt += remain_text.strip().split()
remain_text = ''
InField = False
logging.debug('Not Infield field = \'%s\'', field) #DEBUG
logging.debug('Not Infield remain_text = \'%s\'', remain_text) #DEBUG
else:
cmd_opt += remain_text[:pos].strip().split() # Remove """
remain_text = remain_text[pos+3:]
InField = True
logging.debug('Not Infield field = \'%s\'', field) #DEBUG
logging.debug('Not Infield remain_text = \'%s\'', remain_text) #DEBUG
field = ''
if field != '':
cmd_opt.append(field)
for j in range(len(cmd_opt)):
if cmd_opt[j][:3] == '"""':
cmd_opt[j] = cmd_opt[j][3:]
if cmd_opt[j][-3:] == '"""':
cmd_opt[j] = cmd_opt[j][:-3]
logging.debug(' cmd_opt = \'%s\'\n\n', str(cmd_opt)) #DEBUG
if len(cmd_opt) >= 2:
cmd = cmd_opt[0].lower()
opt = cmd_opt[1:]
logging.debug(' (cmd, opt) = (%s, %s)', cmd, opt) #DEBUG
print('Command: {}'.format(cmd), end='')
for item in opt:
print('\t"{}"'.format(item), end='')
print('')
                        # [DONE]: implement per-page speed: each page can set
                        # the playback speed that applies from the next page on
                        if cmd == 'speed':
                            # use opt[0] only
                            # [DONE]: check opt[0] parses as a number and that
                            # (float(opt[0]) / 60) is > 0 and < MAX_FRAME_RATE
                            try:
                                speed_value = float(opt[0])
                            except ValueError:
                                logging.error(' [ERROR] The argument of the "speed" command is not a number')
                                pause_exit()
                            # comparisons can be chained arbitrarily in Python
                            if not (0 < speed_value / 60 < MAX_FRAME_RATE):
                                logging.error(' [ERROR] The argument of the "speed" command must be greater than 0 and less than ' + str(MAX_FRAME_RATE * 60))
                                pause_exit()
                            frameRate[-1] = speed_value / 60  # frames per minute --> frames per second
                        # [TODO]: implement automatic language switching
                        # [DONE]: implement automatic SSML generation
elif cmd == 'say':
# use ' '.join(opt)
tts_text_file.write(' '.join(opt).strip('\'\"“”‘’`') + '\n')
needTTS = True
# Run TTS
# [DONE]: use tts_text & tts_audio
if needTTS:
        # [DONE]: resolve paths based on workDir (the program install directory)
tts_exe = os.path.join(ttsDir, 'TTS_engine.exe')
# print('{} "{}" "{}"'.format(tts_exe, cur_tts_text, cur_audio))
        print('Synthesizing narrative...')
os.system('{} "{}" "{}"'.format(tts_exe, cur_tts_text, cur_audio)) # RELEASE)
print('InsertAudio,{},{}'.format(pageNum, quotedStr(cur_audio)), file=post_process_script_file)
else:
pageAudio[-1] = None
# [DONE]: (with animation) make page number conform with JPGs
numPage += 1
# # Step2: Convert PDF to Video
# # Step2.1: Convert PDF to JPGs
# convert_exe = quotedStr(os.path.join(codecDir, 'convert.exe')) #PATH
# os.system(convert_exe + ' -units PixelsPerInch -density 300 -resize {} {} {}'.format(sizeSpec, pdf, image))
# # Step2.2: Convert JPGs to Video
# # V0.2: with audio in each part of animation
# ffmpeg_exe = quotedStr(os.path.join(codecDir, 'ffmpeg.exe')) #PATH
# with open(mp4list, 'w', encoding = 'UTF-8') as f:
# for i in range(numPage):
# cur_audio = audio_specifier.format(i)
# #cur_audio = audio
# cur_image = image_specifier.format(i)
# cur_video = video_specifier.format(i)
# if numPage == 1: #TODO: hot fix (ad-hoc)
# cur_image = os.path.join(tmpDir, pdfRoot + '.jpeg')
# # when audio file exists
# if pageAudio[i]:
# os.system(ffmpeg_exe + ' -i {a} -framerate {f} -i {i} -r 30 -y {v}'.format(a=cur_audio, f=frameRate[i], i=cur_image, v=cur_video)) #c:v libx264 -r 30 -pix_fmt yuv420p -y
# else:
# os.system(ffmpeg_exe + ' -framerate {f} -i {i} -r 30 -y {v}'.format(f=frameRate[i], i=cur_image, v=cur_video)) #c:v libx264 -r 30 -pix_fmt yuv420p -y
# f.write('file \'' + cur_video + '\' \n')
# os.system(ffmpeg_exe + ' -f concat -i {l} -c copy -y {v}'.format(l=mp4list, v=video))
pause_exit()
| 44.274021
| 201
| 0.484045
|
1c50505ed45c8451c46826f283415e79c14171ae
| 18,716
|
py
|
Python
|
kombu/transport/SQS.py
|
alex/kombu
|
3cb79f9c8c4db3c47c66b5419fd7a40d09758b87
|
[
"BSD-3-Clause"
] | 1
|
2015-11-08T13:02:26.000Z
|
2015-11-08T13:02:26.000Z
|
kombu/transport/SQS.py
|
alex/kombu
|
3cb79f9c8c4db3c47c66b5419fd7a40d09758b87
|
[
"BSD-3-Clause"
] | null | null | null |
kombu/transport/SQS.py
|
alex/kombu
|
3cb79f9c8c4db3c47c66b5419fd7a40d09758b87
|
[
"BSD-3-Clause"
] | null | null | null |
"""
kombu.transport.SQS
===================
Amazon SQS transport module for Kombu. This package implements an AMQP-like
interface on top of Amazon's SQS service, with the goal of being optimized for
high performance and reliability.
The default settings for this module are currently focused on high performance
in task queue situations where tasks are small, idempotent and run very fast.
SQS Features supported by this transport:
Long Polling:
http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/
sqs-long-polling.html
Long polling is enabled by setting the `wait_time_seconds` transport
option to a number > 1. Amazon supports up to 20 seconds. This is
disabled for now, but will be enabled by default in the near future.
Batch API Actions:
http://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/
sqs-batch-api.html
The default behavior of the SQS Channel.drain_events() method is to
request up to the 'prefetch_count' messages on every request to SQS.
These messages are stored locally in a deque object and passed back
to the Transport until the deque is empty, before triggering a new
API call to Amazon.
This behavior dramatically speeds up the rate that you can pull tasks
from SQS when you have short-running tasks (or a large number of workers).
When a Celery worker has multiple queues to monitor, it will pull down
up to 'prefetch_count' messages from queueA and work on them all before
moving on to queueB. If queueB is empty, it will wait up until
'polling_interval' expires before moving back and checking on queueA.
"""
from __future__ import absolute_import
import collections
import socket
import string
from anyjson import loads, dumps
import boto
from boto import exception
from boto import sdb as _sdb
from boto import sqs as _sqs
from boto.sdb.domain import Domain
from boto.sdb.connection import SDBConnection
from boto.sqs.connection import SQSConnection
from boto.sqs.message import Message
from kombu.five import Empty, range, text_t
from kombu.log import get_logger
from kombu.utils import cached_property, uuid
from kombu.utils.encoding import bytes_to_str, safe_str
from kombu.transport.virtual import scheduling
from . import virtual
logger = get_logger(__name__)
# dots are replaced by dash, all other punctuation
# replaced by underscore.
CHARS_REPLACE_TABLE = dict((ord(c), 0x5f)
for c in string.punctuation if c not in '-_.')
CHARS_REPLACE_TABLE[0x2e] = 0x2d # '.' -> '-'
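# Illustrative example (assumed queue name): with this table,
# Channel.entity_name('celery.video/processing') yields
# 'celery-video_processing'.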
def maybe_int(x):
try:
return int(x)
except ValueError:
return x
BOTO_VERSION = tuple(maybe_int(part) for part in boto.__version__.split('.'))
W_LONG_POLLING = BOTO_VERSION >= (2, 8)
#: SQS bulk get supports a maximum of 10 messages at a time.
SQS_MAX_MESSAGES = 10
class Table(Domain):
"""Amazon SimpleDB domain describing the message routing table."""
# caches queues already bound, so we don't have to declare them again.
_already_bound = set()
def routes_for(self, exchange):
"""Iterator giving all routes for an exchange."""
return self.select("""WHERE exchange = '%s'""" % exchange)
def get_queue(self, queue):
"""Get binding for queue."""
qid = self._get_queue_id(queue)
if qid:
return self.get_item(qid)
def create_binding(self, queue):
"""Get binding item for queue.
Creates the item if it doesn't exist.
"""
item = self.get_queue(queue)
if item:
return item, item['id']
id = uuid()
return self.new_item(id), id
def queue_bind(self, exchange, routing_key, pattern, queue):
if queue not in self._already_bound:
binding, id = self.create_binding(queue)
binding.update(exchange=exchange,
routing_key=routing_key or '',
pattern=pattern or '',
queue=queue or '',
id=id)
binding.save()
self._already_bound.add(queue)
    def queue_delete(self, queue):
        """Delete queue by name."""
self._already_bound.discard(queue)
item = self._get_queue_item(queue)
if item:
self.delete_item(item)
def exchange_delete(self, exchange):
"""Delete all routes for `exchange`."""
for item in self.routes_for(exchange):
self.delete_item(item['id'])
def get_item(self, item_name):
"""Uses `consistent_read` by default."""
# Domain is an old-style class, can't use super().
for consistent_read in (False, True):
item = Domain.get_item(self, item_name, consistent_read)
if item:
return item
def select(self, query='', next_token=None,
consistent_read=True, max_items=None):
"""Uses `consistent_read` by default."""
query = """SELECT * FROM `%s` %s""" % (self.name, query)
return Domain.select(self, query, next_token,
consistent_read, max_items)
def _try_first(self, query='', **kwargs):
for c in (False, True):
for item in self.select(query, consistent_read=c, **kwargs):
return item
def get_exchanges(self):
return list(set(i['exchange'] for i in self.select()))
def _get_queue_item(self, queue):
return self._try_first("""WHERE queue = '%s' limit 1""" % queue)
def _get_queue_id(self, queue):
item = self._get_queue_item(queue)
if item:
return item['id']
class Channel(virtual.Channel):
Table = Table
default_region = 'us-east-1'
default_visibility_timeout = 1800 # 30 minutes.
default_wait_time_seconds = 0 # disabled see #198
domain_format = 'kombu%(vhost)s'
_sdb = None
_sqs = None
_queue_cache = {}
_noack_queues = set()
def __init__(self, *args, **kwargs):
super(Channel, self).__init__(*args, **kwargs)
# SQS blows up when you try to create a new queue if one already
# exists with a different visibility_timeout, so this prepopulates
# the queue_cache to protect us from recreating
# queues that are known to already exist.
queues = self.sqs.get_all_queues(prefix=self.queue_name_prefix)
for queue in queues:
self._queue_cache[queue.name] = queue
self._fanout_queues = set()
# The drain_events() method stores extra messages in a local
# Deque object. This allows multiple messages to be requested from
# SQS at once for performance, but maintains the same external API
# to the caller of the drain_events() method.
self._queue_message_cache = collections.deque()
def basic_consume(self, queue, no_ack, *args, **kwargs):
if no_ack:
self._noack_queues.add(queue)
return super(Channel, self).basic_consume(
queue, no_ack, *args, **kwargs
)
def basic_cancel(self, consumer_tag):
if consumer_tag in self._consumers:
queue = self._tag_to_queue[consumer_tag]
self._noack_queues.discard(queue)
return super(Channel, self).basic_cancel(consumer_tag)
def drain_events(self, timeout=None):
"""Return a single payload message from one of our queues.
:raises Empty: if no messages available.
"""
# If we're not allowed to consume or have no consumers, raise Empty
if not self._consumers or not self.qos.can_consume():
raise Empty()
message_cache = self._queue_message_cache
# Check if there are any items in our buffer. If there are any, pop
# off that queue first.
try:
return message_cache.popleft()
except IndexError:
pass
# At this point, go and get more messages from SQS
res, queue = self._poll(self.cycle, timeout=timeout)
message_cache.extend((r, queue) for r in res)
# Now try to pop off the queue again.
try:
return message_cache.popleft()
except IndexError:
raise Empty()
def _reset_cycle(self):
"""Reset the consume cycle.
:returns: a FairCycle object that points to our _get_bulk() method
rather than the standard _get() method. This allows for multiple
messages to be returned at once from SQS (based on the prefetch
limit).
"""
self._cycle = scheduling.FairCycle(
self._get_bulk, self._active_queues, Empty,
)
def entity_name(self, name, table=CHARS_REPLACE_TABLE):
"""Format AMQP queue name into a legal SQS queue name."""
return text_t(safe_str(name)).translate(table)
def _new_queue(self, queue, **kwargs):
"""Ensure a queue with given name exists in SQS."""
# Translate to SQS name for consistency with initial
# _queue_cache population.
queue = self.entity_name(self.queue_name_prefix + queue)
try:
return self._queue_cache[queue]
except KeyError:
q = self._queue_cache[queue] = self.sqs.create_queue(
queue, self.visibility_timeout,
)
return q
def queue_bind(self, queue, exchange=None, routing_key='',
arguments=None, **kwargs):
super(Channel, self).queue_bind(queue, exchange, routing_key,
arguments, **kwargs)
if self.typeof(exchange).type == 'fanout':
self._fanout_queues.add(queue)
def _queue_bind(self, *args):
"""Bind ``queue`` to ``exchange`` with routing key.
Route will be stored in SDB if so enabled.
"""
if self.supports_fanout:
self.table.queue_bind(*args)
def get_table(self, exchange):
"""Get routing table.
Retrieved from SDB if :attr:`supports_fanout`.
"""
if self.supports_fanout:
return [(r['routing_key'], r['pattern'], r['queue'])
for r in self.table.routes_for(exchange)]
return super(Channel, self).get_table(exchange)
def get_exchanges(self):
if self.supports_fanout:
return self.table.get_exchanges()
return super(Channel, self).get_exchanges()
    def _delete(self, queue, *args):
        """Delete queue by name."""
if self.supports_fanout:
self.table.queue_delete(queue)
super(Channel, self)._delete(queue)
self._queue_cache.pop(queue, None)
def exchange_delete(self, exchange, **kwargs):
"""Delete exchange by name."""
if self.supports_fanout:
self.table.exchange_delete(exchange)
super(Channel, self).exchange_delete(exchange, **kwargs)
def _has_queue(self, queue, **kwargs):
"""Return True if ``queue`` was previously declared."""
if self.supports_fanout:
return bool(self.table.get_queue(queue))
return super(Channel, self)._has_queue(queue)
def _put(self, queue, message, **kwargs):
"""Put message onto queue."""
q = self._new_queue(queue)
m = Message()
m.set_body(dumps(message))
q.write(m)
def _put_fanout(self, exchange, message, **kwargs):
"""Deliver fanout message to all queues in ``exchange``."""
for route in self.table.routes_for(exchange):
self._put(route['queue'], message, **kwargs)
def _get_from_sqs(self, queue, count=1):
"""Retrieve messages from SQS and returns the raw SQS message objects.
:returns: List of SQS message objects
"""
q = self._new_queue(queue)
if W_LONG_POLLING and queue not in self._fanout_queues:
return q.get_messages(
count, wait_time_seconds=self.wait_time_seconds,
)
else: # boto < 2.8
return q.get_messages(count)
def _message_to_python(self, message, queue_name, queue):
payload = loads(bytes_to_str(message.get_body()))
if queue_name in self._noack_queues:
queue.delete_message(message)
else:
payload['properties']['delivery_info'].update({
'sqs_message': message, 'sqs_queue': queue,
})
return payload
def _messages_to_python(self, messages, queue):
"""Convert a list of SQS Message objects into Payloads.
This method handles converting SQS Message objects into
Payloads, and appropriately updating the queue depending on
the 'ack' settings for that queue.
:param messages: A list of SQS Message objects.
:param queue: String name representing the queue they came from
:returns: A list of Payload objects
"""
q = self._new_queue(queue)
return [self._message_to_python(m, queue, q) for m in messages]
def _get_bulk(self, queue, max_if_unlimited=SQS_MAX_MESSAGES):
"""Try to retrieve multiple messages off ``queue``.
Where _get() returns a single Payload object, this method returns a
list of Payload objects. The number of objects returned is determined
by the total number of messages available in the queue and the
number of messages that the QoS object allows (based on the
prefetch_count).
.. note::
Ignores QoS limits so caller is responsible for checking
that we are allowed to consume at least one message from the
queue. get_bulk will then ask QoS for an estimate of
the number of extra messages that we can consume.
args:
queue: The queue name (string) to pull from
returns:
payloads: A list of payload objects returned
"""
# drain_events calls `can_consume` first, consuming
# a token, so we know that we are allowed to consume at least
# one message.
maxcount = self.qos.can_consume_max_estimate()
maxcount = max_if_unlimited if maxcount is None else max(maxcount, 1)
if maxcount:
messages = self._get_from_sqs(
queue, count=min(maxcount, SQS_MAX_MESSAGES),
)
if messages:
return self._messages_to_python(messages, queue)
raise Empty()
def _get(self, queue):
"""Try to retrieve a single message off ``queue``."""
messages = self._get_from_sqs(queue, count=1)
if messages:
return self._messages_to_python(messages, queue)[0]
raise Empty()
def _restore(self, message,
unwanted_delivery_info=('sqs_message', 'sqs_queue')):
for unwanted_key in unwanted_delivery_info:
# Remove objects that aren't JSON serializable (Issue #1108).
message.delivery_info.pop(unwanted_key, None)
return super(Channel, self)._restore(message)
def basic_ack(self, delivery_tag):
delivery_info = self.qos.get(delivery_tag).delivery_info
try:
queue = delivery_info['sqs_queue']
except KeyError:
pass
else:
queue.delete_message(delivery_info['sqs_message'])
super(Channel, self).basic_ack(delivery_tag)
def _size(self, queue):
"""Return the number of messages in a queue."""
return self._new_queue(queue).count()
def _purge(self, queue):
"""Delete all current messages in a queue."""
q = self._new_queue(queue)
# SQS is slow at registering messages, so run for a few
# iterations to ensure messages are deleted.
size = 0
for i in range(10):
size += q.count()
if not size:
break
q.clear()
return size
def close(self):
super(Channel, self).close()
for conn in (self._sqs, self._sdb):
if conn:
try:
conn.close()
except AttributeError as exc: # FIXME ???
if "can't set attribute" not in str(exc):
raise
def _get_regioninfo(self, regions):
if self.region:
for _r in regions:
if _r.name == self.region:
return _r
def _aws_connect_to(self, fun, regions):
conninfo = self.conninfo
region = self._get_regioninfo(regions)
return fun(region=region,
aws_access_key_id=conninfo.userid,
aws_secret_access_key=conninfo.password,
port=conninfo.port)
def _next_delivery_tag(self):
return uuid() # See #73
@property
def sqs(self):
if self._sqs is None:
self._sqs = self._aws_connect_to(SQSConnection, _sqs.regions())
return self._sqs
@property
def sdb(self):
if self._sdb is None:
self._sdb = self._aws_connect_to(SDBConnection, _sdb.regions())
return self._sdb
@property
def table(self):
name = self.entity_name(
self.domain_format % {'vhost': self.conninfo.virtual_host})
d = self.sdb.get_object(
'CreateDomain', {'DomainName': name}, self.Table)
d.name = name
return d
@property
def conninfo(self):
return self.connection.client
@property
def transport_options(self):
return self.connection.client.transport_options
@cached_property
def visibility_timeout(self):
return (self.transport_options.get('visibility_timeout') or
self.default_visibility_timeout)
@cached_property
def queue_name_prefix(self):
return self.transport_options.get('queue_name_prefix', '')
@cached_property
def supports_fanout(self):
return self.transport_options.get('sdb_persistence', False)
@cached_property
def region(self):
return self.transport_options.get('region') or self.default_region
@cached_property
def wait_time_seconds(self):
return self.transport_options.get('wait_time_seconds',
self.default_wait_time_seconds)
class Transport(virtual.Transport):
Channel = Channel
polling_interval = 1
wait_time_seconds = 0
default_port = None
connection_errors = (
virtual.Transport.connection_errors +
(exception.SQSError, socket.error)
)
channel_errors = (
virtual.Transport.channel_errors + (exception.SQSDecodeError, )
)
driver_type = 'sqs'
driver_name = 'sqs'
avg_line_length: 34.467772 | max_line_length: 78 | alphanum_fraction: 0.629141

hexsha: 2ac3bdc2ee0121c2c3ef4008fdfaed692571ac9c | size: 124600 | ext: py | lang: Python
max_stars_repo_path: resources/usr/local/lib/python2.7/dist-packages/Cython/Compiler/ModuleNode.py
max_stars_repo_name: edawson/parliament2 | max_stars_repo_head_hexsha: 2632aa3484ef64c9539c4885026b705b737f6d1e
max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: 1 | stars: 2017-08-07T14:52:02.000Z - 2017-08-07T14:52:02.000Z
max_issues_repo_path/name/hexsha/licenses: same as above | max_issues_count: 3 | issues: 2020-09-26T01:09:47.000Z - 2022-02-10T02:12:08.000Z
max_forks_repo_path/name/hexsha/licenses: same as above | max_forks_count: 1 | forks: 2020-05-28T23:01:44.000Z - 2020-05-28T23:01:44.000Z
#
# Module parse tree node
#
from __future__ import absolute_import
import cython
cython.declare(Naming=object, Options=object, PyrexTypes=object, TypeSlots=object,
error=object, warning=object, py_object_type=object, UtilityCode=object,
EncodedString=object)
import json
import os
import operator
from .PyrexTypes import CPtrType
from . import Future
from . import Annotate
from . import Code
from . import Naming
from . import Nodes
from . import Options
from . import TypeSlots
from . import PyrexTypes
from .Errors import error, warning
from .PyrexTypes import py_object_type
from ..Utils import open_new_file, replace_suffix, decode_filename
from .Code import UtilityCode
from .StringEncoding import EncodedString
def check_c_declarations_pxd(module_node):
module_node.scope.check_c_classes_pxd()
return module_node
def check_c_declarations(module_node):
module_node.scope.check_c_classes()
module_node.scope.check_c_functions()
return module_node
def generate_c_code_config(env, options):
if Options.annotate or options.annotate:
emit_linenums = False
else:
emit_linenums = options.emit_linenums
return Code.CCodeConfig(
emit_linenums=emit_linenums,
emit_code_comments=env.directives['emit_code_comments'],
c_line_in_traceback=options.c_line_in_traceback)
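# Hedged usage sketch (mirroring the call sites in generate_h_code and
# generate_c_code below): the config produced here is handed to the global
# code-generation state before any C code is written.
#
#     c_code_config = generate_c_code_config(env, options)
#     Code.GlobalState(h_code, module_node, c_code_config)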
class ModuleNode(Nodes.Node, Nodes.BlockNode):
# doc string or None
# body StatListNode
#
# referenced_modules [ModuleScope]
# full_module_name string
#
# scope The module scope.
# compilation_source A CompilationSource (see Main)
# directives Top-level compiler directives
child_attrs = ["body"]
directives = None
def merge_in(self, tree, scope, merge_scope=False):
# Merges in the contents of another tree, and possibly scope. With the
# current implementation below, this must be done right prior
# to code generation.
#
# Note: This way of doing it seems strange -- I believe the
# right concept is to split ModuleNode into a ModuleNode and a
# CodeGenerator, and tell that CodeGenerator to generate code
# from multiple sources.
assert isinstance(self.body, Nodes.StatListNode)
if isinstance(tree, Nodes.StatListNode):
self.body.stats.extend(tree.stats)
else:
self.body.stats.append(tree)
self.scope.utility_code_list.extend(scope.utility_code_list)
def extend_if_not_in(L1, L2):
for x in L2:
if x not in L1:
L1.append(x)
extend_if_not_in(self.scope.include_files, scope.include_files)
extend_if_not_in(self.scope.included_files, scope.included_files)
extend_if_not_in(self.scope.python_include_files,
scope.python_include_files)
if merge_scope:
# Ensure that we don't generate import code for these entries!
for entry in scope.c_class_entries:
entry.type.module_name = self.full_module_name
entry.type.scope.directives["internal"] = True
self.scope.merge_in(scope)
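    # A hedged, standalone illustration of the extend_if_not_in() helper
    # above -- an order-preserving merge that skips duplicates:
    #
    #     >>> a, b = ['x.h', 'y.h'], ['y.h', 'z.h']
    #     >>> for item in b:
    #     ...     if item not in a:
    #     ...         a.append(item)
    #     >>> a
    #     ['x.h', 'y.h', 'z.h']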
def analyse_declarations(self, env):
if not Options.docstrings:
env.doc = self.doc = None
elif Options.embed_pos_in_docstring:
env.doc = EncodedString(u'File: %s (starting at line %s)' % Nodes.relative_position(self.pos))
            if self.doc is not None:
env.doc = EncodedString(env.doc + u'\n' + self.doc)
env.doc.encoding = self.doc.encoding
else:
env.doc = self.doc
env.directives = self.directives
self.body.analyse_declarations(env)
def prepare_utility_code(self):
# prepare any utility code that must be created before code generation
# specifically: CythonUtilityCode
env = self.scope
if env.has_import_star:
self.create_import_star_conversion_utility_code(env)
def process_implementation(self, options, result):
env = self.scope
env.return_type = PyrexTypes.c_void_type
self.referenced_modules = []
self.find_referenced_modules(env, self.referenced_modules, {})
self.sort_cdef_classes(env)
self.generate_c_code(env, options, result)
self.generate_h_code(env, options, result)
self.generate_api_code(env, options, result)
def has_imported_c_functions(self):
for module in self.referenced_modules:
for entry in module.cfunc_entries:
if entry.defined_in_pxd:
return 1
return 0
def generate_h_code(self, env, options, result):
def h_entries(entries, api=0, pxd=0):
return [entry for entry in entries
if ((entry.visibility == 'public') or
(api and entry.api) or
(pxd and entry.defined_in_pxd))]
h_types = h_entries(env.type_entries, api=1)
h_vars = h_entries(env.var_entries)
h_funcs = h_entries(env.cfunc_entries)
h_extension_types = h_entries(env.c_class_entries)
if h_types or h_vars or h_funcs or h_extension_types:
result.h_file = replace_suffix(result.c_file, ".h")
h_code = Code.CCodeWriter()
c_code_config = generate_c_code_config(env, options)
Code.GlobalState(h_code, self, c_code_config)
if options.generate_pxi:
result.i_file = replace_suffix(result.c_file, ".pxi")
i_code = Code.PyrexCodeWriter(result.i_file)
else:
i_code = None
h_code.put_generated_by()
h_guard = Naming.h_guard_prefix + self.api_name(env)
h_code.put_h_guard(h_guard)
h_code.putln("")
self.generate_type_header_code(h_types, h_code)
if options.capi_reexport_cincludes:
self.generate_includes(env, [], h_code)
h_code.putln("")
api_guard = Naming.api_guard_prefix + self.api_name(env)
h_code.putln("#ifndef %s" % api_guard)
h_code.putln("")
self.generate_extern_c_macro_definition(h_code)
h_code.putln("")
self.generate_dl_import_macro(h_code)
if h_extension_types:
h_code.putln("")
for entry in h_extension_types:
self.generate_cclass_header_code(entry.type, h_code)
if i_code:
self.generate_cclass_include_code(entry.type, i_code)
if h_funcs:
h_code.putln("")
for entry in h_funcs:
self.generate_public_declaration(entry, h_code, i_code)
if h_vars:
h_code.putln("")
for entry in h_vars:
self.generate_public_declaration(entry, h_code, i_code)
h_code.putln("")
h_code.putln("#endif /* !%s */" % api_guard)
h_code.putln("")
h_code.putln("#if PY_MAJOR_VERSION < 3")
h_code.putln("PyMODINIT_FUNC init%s(void);" % env.module_name)
h_code.putln("#else")
h_code.putln("PyMODINIT_FUNC PyInit_%s(void);" % env.module_name)
h_code.putln("#endif")
h_code.putln("")
h_code.putln("#endif /* !%s */" % h_guard)
f = open_new_file(result.h_file)
try:
h_code.copyto(f)
finally:
f.close()
def generate_public_declaration(self, entry, h_code, i_code):
h_code.putln("%s %s;" % (
Naming.extern_c_macro,
entry.type.declaration_code(
entry.cname, dll_linkage="DL_IMPORT")))
if i_code:
i_code.putln("cdef extern %s" % (
entry.type.declaration_code(entry.cname, pyrex=1)))
def api_name(self, env):
return env.qualified_name.replace(".", "__")
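    # Example: a qualified module name such as 'spam.eggs' becomes the
    # C-identifier-safe 'spam__eggs'.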
def generate_api_code(self, env, options, result):
def api_entries(entries, pxd=0):
return [entry for entry in entries
if entry.api or (pxd and entry.defined_in_pxd)]
api_vars = api_entries(env.var_entries)
api_funcs = api_entries(env.cfunc_entries)
api_extension_types = api_entries(env.c_class_entries)
if api_vars or api_funcs or api_extension_types:
result.api_file = replace_suffix(result.c_file, "_api.h")
h_code = Code.CCodeWriter()
c_code_config = generate_c_code_config(env, options)
Code.GlobalState(h_code, self, c_code_config)
h_code.put_generated_by()
api_guard = Naming.api_guard_prefix + self.api_name(env)
h_code.put_h_guard(api_guard)
h_code.putln('#include "Python.h"')
if result.h_file:
h_code.putln('#include "%s"' % os.path.basename(result.h_file))
if api_extension_types:
h_code.putln("")
for entry in api_extension_types:
type = entry.type
h_code.putln("static PyTypeObject *%s = 0;" % type.typeptr_cname)
h_code.putln("#define %s (*%s)" % (
type.typeobj_cname, type.typeptr_cname))
if api_funcs:
h_code.putln("")
for entry in api_funcs:
type = CPtrType(entry.type)
cname = env.mangle(Naming.func_prefix_api, entry.name)
h_code.putln("static %s = 0;" % type.declaration_code(cname))
h_code.putln("#define %s %s" % (entry.name, cname))
if api_vars:
h_code.putln("")
for entry in api_vars:
type = CPtrType(entry.type)
cname = env.mangle(Naming.varptr_prefix_api, entry.name)
h_code.putln("static %s = 0;" % type.declaration_code(cname))
h_code.putln("#define %s (*%s)" % (entry.name, cname))
h_code.put(UtilityCode.load_as_string("PyIdentifierFromString", "ImportExport.c")[0])
h_code.put(UtilityCode.load_as_string("ModuleImport", "ImportExport.c")[1])
if api_vars:
h_code.put(UtilityCode.load_as_string("VoidPtrImport", "ImportExport.c")[1])
if api_funcs:
h_code.put(UtilityCode.load_as_string("FunctionImport", "ImportExport.c")[1])
if api_extension_types:
h_code.put(UtilityCode.load_as_string("TypeImport", "ImportExport.c")[1])
h_code.putln("")
h_code.putln("static int import_%s(void) {" % self.api_name(env))
h_code.putln("PyObject *module = 0;")
h_code.putln('module = __Pyx_ImportModule("%s");' % env.qualified_name)
h_code.putln("if (!module) goto bad;")
for entry in api_funcs:
cname = env.mangle(Naming.func_prefix_api, entry.name)
sig = entry.type.signature_string()
h_code.putln(
'if (__Pyx_ImportFunction(module, "%s", (void (**)(void))&%s, "%s") < 0) goto bad;'
% (entry.name, cname, sig))
for entry in api_vars:
cname = env.mangle(Naming.varptr_prefix_api, entry.name)
sig = entry.type.empty_declaration_code()
h_code.putln(
'if (__Pyx_ImportVoidPtr(module, "%s", (void **)&%s, "%s") < 0) goto bad;'
% (entry.name, cname, sig))
h_code.putln("Py_DECREF(module); module = 0;")
for entry in api_extension_types:
self.generate_type_import_call(
entry.type, h_code,
"if (!%s) goto bad;" % entry.type.typeptr_cname)
h_code.putln("return 0;")
h_code.putln("bad:")
h_code.putln("Py_XDECREF(module);")
h_code.putln("return -1;")
h_code.putln("}")
h_code.putln("")
h_code.putln("#endif /* !%s */" % api_guard)
f = open_new_file(result.api_file)
try:
h_code.copyto(f)
finally:
f.close()
def generate_cclass_header_code(self, type, h_code):
h_code.putln("%s %s %s;" % (
Naming.extern_c_macro,
PyrexTypes.public_decl("PyTypeObject", "DL_IMPORT"),
type.typeobj_cname))
def generate_cclass_include_code(self, type, i_code):
i_code.putln("cdef extern class %s.%s:" % (
type.module_name, type.name))
i_code.indent()
var_entries = type.scope.var_entries
if var_entries:
for entry in var_entries:
i_code.putln("cdef %s" % (
entry.type.declaration_code(entry.cname, pyrex=1)))
else:
i_code.putln("pass")
i_code.dedent()
def generate_c_code(self, env, options, result):
modules = self.referenced_modules
if Options.annotate or options.annotate:
rootwriter = Annotate.AnnotationCCodeWriter()
else:
rootwriter = Code.CCodeWriter()
c_code_config = generate_c_code_config(env, options)
globalstate = Code.GlobalState(
rootwriter, self,
code_config=c_code_config,
common_utility_include_dir=options.common_utility_include_dir,
)
globalstate.initialize_main_c_code()
h_code = globalstate['h_code']
self.generate_module_preamble(env, options, modules, result.embedded_metadata, h_code)
globalstate.module_pos = self.pos
globalstate.directives = self.directives
globalstate.use_utility_code(refnanny_utility_code)
code = globalstate['before_global_var']
code.putln('#define __Pyx_MODULE_NAME "%s"' % self.full_module_name)
code.putln("int %s%s = 0;" % (Naming.module_is_main, self.full_module_name.replace('.', '__')))
code.putln("")
code.putln("/* Implementation of '%s' */" % env.qualified_name)
code = globalstate['all_the_rest']
self.generate_cached_builtins_decls(env, code)
self.generate_lambda_definitions(env, code)
# generate normal variable and function definitions
self.generate_variable_definitions(env, code)
self.body.generate_function_definitions(env, code)
code.mark_pos(None)
self.generate_typeobj_definitions(env, code)
self.generate_method_table(env, code)
if env.has_import_star:
self.generate_import_star(env, code)
self.generate_pymoduledef_struct(env, code)
# init_globals is inserted before this
self.generate_module_init_func(modules[:-1], env, globalstate['init_module'])
self.generate_module_cleanup_func(env, globalstate['cleanup_module'])
if Options.embed:
self.generate_main_method(env, globalstate['main_method'])
self.generate_filename_table(globalstate['filename_table'])
self.generate_declarations_for_modules(env, modules, globalstate)
h_code.write('\n')
for utilcode in env.utility_code_list[:]:
globalstate.use_utility_code(utilcode)
globalstate.finalize_main_c_code()
f = open_new_file(result.c_file)
try:
rootwriter.copyto(f)
finally:
f.close()
result.c_file_generated = 1
if options.gdb_debug:
self._serialize_lineno_map(env, rootwriter)
if Options.annotate or options.annotate:
self._generate_annotations(rootwriter, result, options)
def _generate_annotations(self, rootwriter, result, options):
self.annotate(rootwriter)
coverage_xml_filename = Options.annotate_coverage_xml or options.annotate_coverage_xml
if coverage_xml_filename and os.path.exists(coverage_xml_filename):
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
coverage_xml = ET.parse(coverage_xml_filename).getroot()
for el in coverage_xml.getiterator():
el.tail = None # save some memory
else:
coverage_xml = None
rootwriter.save_annotation(result.main_source_file, result.c_file, coverage_xml=coverage_xml)
# if we included files, additionally generate one annotation file for each
if not self.scope.included_files:
return
search_include_file = self.scope.context.search_include_directories
target_dir = os.path.abspath(os.path.dirname(result.c_file))
for included_file in self.scope.included_files:
target_file = os.path.abspath(os.path.join(target_dir, included_file))
target_file_dir = os.path.dirname(target_file)
if not target_file_dir.startswith(target_dir):
# any other directories may not be writable => avoid trying
continue
source_file = search_include_file(included_file, "", self.pos, include=True)
if not source_file:
continue
if target_file_dir != target_dir and not os.path.exists(target_file_dir):
try:
os.makedirs(target_file_dir)
except OSError as e:
import errno
if e.errno != errno.EEXIST:
raise
rootwriter.save_annotation(source_file, target_file, coverage_xml=coverage_xml)
def _serialize_lineno_map(self, env, ccodewriter):
tb = env.context.gdb_debug_outputwriter
markers = ccodewriter.buffer.allmarkers()
d = {}
for c_lineno, cython_lineno in enumerate(markers):
if cython_lineno > 0:
d.setdefault(cython_lineno, []).append(c_lineno + 1)
tb.start('LineNumberMapping')
for cython_lineno, c_linenos in sorted(d.items()):
attrs = {
'c_linenos': ' '.join(map(str, c_linenos)),
'cython_lineno': str(cython_lineno),
}
tb.start('LineNumber', attrs)
tb.end('LineNumber')
tb.end('LineNumberMapping')
tb.serialize()
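    # Hedged illustration of the mapping built above: one Cython line may
    # expand to several C lines, so C lines are grouped per Cython line.
    #
    #     markers = [0, 3, 3, 7]     # marker i is the Cython line of C line i+1
    #     d == {3: [2, 3], 7: [4]}   # marker 0 (no Cython line) is skipped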
def find_referenced_modules(self, env, module_list, modules_seen):
if env not in modules_seen:
modules_seen[env] = 1
for imported_module in env.cimported_modules:
self.find_referenced_modules(imported_module, module_list, modules_seen)
module_list.append(env)
def sort_types_by_inheritance(self, type_dict, type_order, getkey):
# copy the types into a list moving each parent type before
# its first child
type_list = []
for i, key in enumerate(type_order):
new_entry = type_dict[key]
# collect all base classes to check for children
hierarchy = set()
base = new_entry
while base:
base_type = base.type.base_type
if not base_type:
break
base_key = getkey(base_type)
hierarchy.add(base_key)
base = type_dict.get(base_key)
new_entry.base_keys = hierarchy
# find the first (sub-)subclass and insert before that
for j in range(i):
entry = type_list[j]
if key in entry.base_keys:
type_list.insert(j, new_entry)
break
else:
type_list.append(new_entry)
return type_list
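    # Hedged example of the ordering contract above: for classes A, B(A),
    # C(B) discovered in the order [C, A, B], each parent is inserted in
    # front of its first already-placed subclass, yielding [A, B, C] --
    # base types always precede their descendants in the generated C code.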
def sort_type_hierarchy(self, module_list, env):
# poor developer's OrderedDict
vtab_dict, vtab_dict_order = {}, []
vtabslot_dict, vtabslot_dict_order = {}, []
for module in module_list:
for entry in module.c_class_entries:
if entry.used and not entry.in_cinclude:
type = entry.type
key = type.vtabstruct_cname
if not key:
continue
if key in vtab_dict:
# FIXME: this should *never* happen, but apparently it does
# for Cython generated utility code
from .UtilityCode import NonManglingModuleScope
assert isinstance(entry.scope, NonManglingModuleScope), str(entry.scope)
assert isinstance(vtab_dict[key].scope, NonManglingModuleScope), str(vtab_dict[key].scope)
else:
vtab_dict[key] = entry
vtab_dict_order.append(key)
all_defined_here = module is env
for entry in module.type_entries:
if entry.used and (all_defined_here or entry.defined_in_pxd):
type = entry.type
if type.is_extension_type and not entry.in_cinclude:
type = entry.type
key = type.objstruct_cname
assert key not in vtabslot_dict, key
vtabslot_dict[key] = entry
vtabslot_dict_order.append(key)
def vtabstruct_cname(entry_type):
return entry_type.vtabstruct_cname
vtab_list = self.sort_types_by_inheritance(
vtab_dict, vtab_dict_order, vtabstruct_cname)
def objstruct_cname(entry_type):
return entry_type.objstruct_cname
vtabslot_list = self.sort_types_by_inheritance(
vtabslot_dict, vtabslot_dict_order, objstruct_cname)
return (vtab_list, vtabslot_list)
def sort_cdef_classes(self, env):
key_func = operator.attrgetter('objstruct_cname')
entry_dict, entry_order = {}, []
for entry in env.c_class_entries:
key = key_func(entry.type)
assert key not in entry_dict, key
entry_dict[key] = entry
entry_order.append(key)
env.c_class_entries[:] = self.sort_types_by_inheritance(
entry_dict, entry_order, key_func)
def generate_type_definitions(self, env, modules, vtab_list, vtabslot_list, code):
# TODO: Why are these separated out?
for entry in vtabslot_list:
self.generate_objstruct_predeclaration(entry.type, code)
vtabslot_entries = set(vtabslot_list)
for module in modules:
definition = module is env
if definition:
type_entries = module.type_entries
else:
type_entries = []
for entry in module.type_entries:
if entry.defined_in_pxd:
type_entries.append(entry)
type_entries = [t for t in type_entries if t not in vtabslot_entries]
self.generate_type_header_code(type_entries, code)
for entry in vtabslot_list:
self.generate_objstruct_definition(entry.type, code)
self.generate_typeobj_predeclaration(entry, code)
for entry in vtab_list:
self.generate_typeobj_predeclaration(entry, code)
self.generate_exttype_vtable_struct(entry, code)
self.generate_exttype_vtabptr_declaration(entry, code)
self.generate_exttype_final_methods_declaration(entry, code)
def generate_declarations_for_modules(self, env, modules, globalstate):
typecode = globalstate['type_declarations']
typecode.putln("")
typecode.putln("/*--- Type declarations ---*/")
# This is to work around the fact that array.h isn't part of the C-API,
# but we need to declare it earlier than utility code.
if 'cpython.array' in [m.qualified_name for m in modules]:
typecode.putln('#ifndef _ARRAYARRAY_H')
typecode.putln('struct arrayobject;')
typecode.putln('typedef struct arrayobject arrayobject;')
typecode.putln('#endif')
vtab_list, vtabslot_list = self.sort_type_hierarchy(modules, env)
self.generate_type_definitions(
env, modules, vtab_list, vtabslot_list, typecode)
modulecode = globalstate['module_declarations']
for module in modules:
defined_here = module is env
modulecode.putln("")
modulecode.putln("/* Module declarations from '%s' */" % module.qualified_name)
self.generate_c_class_declarations(module, modulecode, defined_here)
self.generate_cvariable_declarations(module, modulecode, defined_here)
self.generate_cfunction_declarations(module, modulecode, defined_here)
def _put_setup_code(self, code, name):
code.put(UtilityCode.load_as_string(name, "ModuleSetupCode.c")[1])
def generate_module_preamble(self, env, options, cimported_modules, metadata, code):
code.put_generated_by()
if metadata:
code.putln("/* BEGIN: Cython Metadata")
code.putln(json.dumps(metadata, indent=4, sort_keys=True))
code.putln("END: Cython Metadata */")
code.putln("")
code.putln("#define PY_SSIZE_T_CLEAN")
for filename in env.python_include_files:
code.putln('#include "%s"' % filename)
code.putln("#ifndef Py_PYTHON_H")
code.putln(" #error Python headers needed to compile C extensions, "
"please install development version of Python.")
code.putln("#elif PY_VERSION_HEX < 0x02060000 || "
"(0x03000000 <= PY_VERSION_HEX && PY_VERSION_HEX < 0x03020000)")
code.putln(" #error Cython requires Python 2.6+ or Python 3.2+.")
code.putln("#else")
code.globalstate["end"].putln("#endif /* Py_PYTHON_H */")
from .. import __version__
code.putln('#define CYTHON_ABI "%s"' % __version__.replace('.', '_'))
self._put_setup_code(code, "CModulePreamble")
if env.context.options.cplus:
self._put_setup_code(code, "CppInitCode")
else:
self._put_setup_code(code, "CInitCode")
self._put_setup_code(code, "MathInitCode")
if options.c_line_in_traceback:
cinfo = "%s = %s; " % (Naming.clineno_cname, Naming.line_c_macro)
else:
cinfo = ""
code.put("""
#define __PYX_ERR(f_index, lineno, Ln_error) \\
{ \\
%s = %s[f_index]; %s = lineno; %sgoto Ln_error; \\
}
""" % (Naming.filename_cname, Naming.filetable_cname, Naming.lineno_cname, cinfo))
code.put("""
#if PY_MAJOR_VERSION >= 3
#define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)
#define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)
#else
""")
if Future.division in env.context.future_directives:
code.putln(" #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y)")
code.putln(" #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y)")
else:
code.putln(" #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y)")
code.putln(" #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y)")
code.putln("#endif")
code.putln("")
self.generate_extern_c_macro_definition(code)
code.putln("")
code.putln("#define %s" % Naming.h_guard_prefix + self.api_name(env))
code.putln("#define %s" % Naming.api_guard_prefix + self.api_name(env))
self.generate_includes(env, cimported_modules, code)
code.putln("")
code.putln("#ifdef PYREX_WITHOUT_ASSERTIONS")
code.putln("#define CYTHON_WITHOUT_ASSERTIONS")
code.putln("#endif")
code.putln("")
if env.directives['ccomplex']:
code.putln("")
code.putln("#if !defined(CYTHON_CCOMPLEX)")
code.putln("#define CYTHON_CCOMPLEX 1")
code.putln("#endif")
code.putln("")
code.put(UtilityCode.load_as_string("UtilityFunctionPredeclarations", "ModuleSetupCode.c")[0])
c_string_type = env.directives['c_string_type']
c_string_encoding = env.directives['c_string_encoding']
if c_string_type not in ('bytes', 'bytearray') and not c_string_encoding:
error(self.pos, "a default encoding must be provided if c_string_type is not a byte type")
code.putln('#define __PYX_DEFAULT_STRING_ENCODING_IS_ASCII %s' % int(c_string_encoding == 'ascii'))
if c_string_encoding == 'default':
code.putln('#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 1')
else:
code.putln('#define __PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT 0')
code.putln('#define __PYX_DEFAULT_STRING_ENCODING "%s"' % c_string_encoding)
if c_string_type == 'bytearray':
c_string_func_name = 'ByteArray'
else:
c_string_func_name = c_string_type.title()
code.putln('#define __Pyx_PyObject_FromString __Pyx_Py%s_FromString' % c_string_func_name)
code.putln('#define __Pyx_PyObject_FromStringAndSize __Pyx_Py%s_FromStringAndSize' % c_string_func_name)
code.put(UtilityCode.load_as_string("TypeConversions", "TypeConversion.c")[0])
# These utility functions are assumed to exist and used elsewhere.
PyrexTypes.c_long_type.create_to_py_utility_code(env)
PyrexTypes.c_long_type.create_from_py_utility_code(env)
PyrexTypes.c_int_type.create_from_py_utility_code(env)
code.put(Nodes.branch_prediction_macros)
code.putln('')
code.putln('static PyObject *%s;' % env.module_cname)
code.putln('static PyObject *%s;' % env.module_dict_cname)
code.putln('static PyObject *%s;' % Naming.builtins_cname)
code.putln('static PyObject *%s;' % Naming.empty_tuple)
code.putln('static PyObject *%s;' % Naming.empty_bytes)
code.putln('static PyObject *%s;' % Naming.empty_unicode)
if Options.pre_import is not None:
code.putln('static PyObject *%s;' % Naming.preimport_cname)
code.putln('static int %s;' % Naming.lineno_cname)
code.putln('static int %s = 0;' % Naming.clineno_cname)
code.putln('static const char * %s= %s;' % (Naming.cfilenm_cname, Naming.file_c_macro))
code.putln('static const char *%s;' % Naming.filename_cname)
def generate_extern_c_macro_definition(self, code):
name = Naming.extern_c_macro
code.putln("#ifndef %s" % name)
code.putln(" #ifdef __cplusplus")
code.putln(' #define %s extern "C"' % name)
code.putln(" #else")
code.putln(" #define %s extern" % name)
code.putln(" #endif")
code.putln("#endif")
def generate_dl_import_macro(self, code):
code.putln("#ifndef DL_IMPORT")
code.putln(" #define DL_IMPORT(_T) _T")
code.putln("#endif")
def generate_includes(self, env, cimported_modules, code):
includes = []
for filename in env.include_files:
            byte_decoded_filename = str(filename)
            if byte_decoded_filename[0] == '<' and byte_decoded_filename[-1] == '>':
                code.putln('#include %s' % byte_decoded_filename)
            else:
                code.putln('#include "%s"' % byte_decoded_filename)
code.putln_openmp("#include <omp.h>")
def generate_filename_table(self, code):
import os.path as path
full_module_path = path.join(*self.full_module_name.split('.'))
module_abspath = path.splitext(path.abspath(
self.compilation_source.source_desc.get_filenametable_entry()))[0]
root_path = module_abspath[:-len(full_module_path)]
workdir = path.abspath(os.getcwd()) + os.sep
if root_path.startswith(workdir):
# prefer relative paths to current directory (which is most likely the project root)
root_path = workdir
code.putln("")
code.putln("static const char *%s[] = {" % Naming.filetable_cname)
if code.globalstate.filename_list:
for source_desc in code.globalstate.filename_list:
file_abspath = path.abspath(source_desc.get_filenametable_entry())
if file_abspath.startswith(root_path):
filename = file_abspath[len(root_path):]
else:
filename = path.basename(file_abspath)
escaped_filename = filename.replace("\\", "\\\\").replace('"', r'\"')
code.putln('"%s",' % escaped_filename)
else:
# Some C compilers don't like an empty array
code.putln("0")
code.putln("};")
def generate_type_predeclarations(self, env, code):
pass
def generate_type_header_code(self, type_entries, code):
# Generate definitions of structs/unions/enums/typedefs/objstructs.
#self.generate_gcc33_hack(env, code) # Is this still needed?
# Forward declarations
for entry in type_entries:
if not entry.in_cinclude:
#print "generate_type_header_code:", entry.name, repr(entry.type) ###
type = entry.type
if type.is_typedef: # Must test this first!
pass
elif type.is_struct_or_union or type.is_cpp_class:
self.generate_struct_union_predeclaration(entry, code)
elif type.is_ctuple and entry.used:
self.generate_struct_union_predeclaration(entry.type.struct_entry, code)
elif type.is_extension_type:
self.generate_objstruct_predeclaration(type, code)
# Actual declarations
for entry in type_entries:
if not entry.in_cinclude:
#print "generate_type_header_code:", entry.name, repr(entry.type) ###
type = entry.type
if type.is_typedef: # Must test this first!
self.generate_typedef(entry, code)
elif type.is_enum:
self.generate_enum_definition(entry, code)
elif type.is_struct_or_union:
self.generate_struct_union_definition(entry, code)
elif type.is_ctuple and entry.used:
self.generate_struct_union_definition(entry.type.struct_entry, code)
elif type.is_cpp_class:
self.generate_cpp_class_definition(entry, code)
elif type.is_extension_type:
self.generate_objstruct_definition(type, code)
def generate_gcc33_hack(self, env, code):
# Workaround for spurious warning generation in gcc 3.3
code.putln("")
for entry in env.c_class_entries:
type = entry.type
if not type.typedef_flag:
name = type.objstruct_cname
if name.startswith("__pyx_"):
tail = name[6:]
else:
tail = name
code.putln("typedef struct %s __pyx_gcc33_%s;" % (
name, tail))
def generate_typedef(self, entry, code):
base_type = entry.type.typedef_base_type
if base_type.is_numeric:
try:
writer = code.globalstate['numeric_typedefs']
except KeyError:
writer = code
else:
writer = code
writer.mark_pos(entry.pos)
writer.putln("typedef %s;" % base_type.declaration_code(entry.cname))
def sue_predeclaration(self, type, kind, name):
if type.typedef_flag:
return "%s %s;\ntypedef %s %s %s;" % (
kind, name,
kind, name, name)
else:
return "%s %s;" % (kind, name)
def generate_struct_union_predeclaration(self, entry, code):
type = entry.type
if type.is_cpp_class and type.templates:
code.putln("template <typename %s>" % ", typename ".join(
[T.empty_declaration_code() for T in type.templates]))
code.putln(self.sue_predeclaration(type, type.kind, type.cname))
def sue_header_footer(self, type, kind, name):
header = "%s %s {" % (kind, name)
footer = "};"
return header, footer
def generate_struct_union_definition(self, entry, code):
code.mark_pos(entry.pos)
type = entry.type
scope = type.scope
if scope:
kind = type.kind
packed = type.is_struct and type.packed
if packed:
kind = "%s %s" % (type.kind, "__Pyx_PACKED")
code.globalstate.use_utility_code(packed_struct_utility_code)
header, footer = \
self.sue_header_footer(type, kind, type.cname)
if packed:
code.putln("#if defined(__SUNPRO_C)")
code.putln(" #pragma pack(1)")
code.putln("#elif !defined(__GNUC__)")
code.putln(" #pragma pack(push, 1)")
code.putln("#endif")
code.putln(header)
var_entries = scope.var_entries
if not var_entries:
error(entry.pos, "Empty struct or union definition not allowed outside a 'cdef extern from' block")
for attr in var_entries:
code.putln(
"%s;" % attr.type.declaration_code(attr.cname))
code.putln(footer)
if packed:
code.putln("#if defined(__SUNPRO_C)")
code.putln(" #pragma pack()")
code.putln("#elif !defined(__GNUC__)")
code.putln(" #pragma pack(pop)")
code.putln("#endif")
def generate_cpp_class_definition(self, entry, code):
code.mark_pos(entry.pos)
type = entry.type
scope = type.scope
if scope:
if type.templates:
code.putln("template <class %s>" % ", class ".join(
[T.empty_declaration_code() for T in type.templates]))
# Just let everything be public.
code.put("struct %s" % type.cname)
if type.base_classes:
base_class_decl = ", public ".join(
[base_class.empty_declaration_code() for base_class in type.base_classes])
code.put(" : public %s" % base_class_decl)
code.putln(" {")
has_virtual_methods = False
has_destructor = False
for attr in scope.var_entries:
if attr.type.is_cfunction and attr.type.is_static_method:
code.put("static ")
elif attr.type.is_cfunction and attr.name != "<init>":
code.put("virtual ")
has_virtual_methods = True
if attr.cname[0] == '~':
has_destructor = True
code.putln("%s;" % attr.type.declaration_code(attr.cname))
if has_virtual_methods and not has_destructor:
code.put("virtual ~%s() { }" % type.cname)
code.putln("};")
def generate_enum_definition(self, entry, code):
code.mark_pos(entry.pos)
type = entry.type
name = entry.cname or entry.name or ""
header, footer = self.sue_header_footer(type, "enum", name)
code.putln(header)
enum_values = entry.enum_values
if not enum_values:
error(entry.pos, "Empty enum definition not allowed outside a 'cdef extern from' block")
else:
last_entry = enum_values[-1]
# this does not really generate code, just builds the result value
for value_entry in enum_values:
if value_entry.value_node is not None:
value_entry.value_node.generate_evaluation_code(code)
for value_entry in enum_values:
if value_entry.value_node is None:
value_code = value_entry.cname
else:
value_code = ("%s = %s" % (
value_entry.cname,
value_entry.value_node.result()))
if value_entry is not last_entry:
value_code += ","
code.putln(value_code)
code.putln(footer)
if entry.type.typedef_flag:
# Not pre-declared.
code.putln("typedef enum %s %s;" % (name, name))
def generate_typeobj_predeclaration(self, entry, code):
code.putln("")
name = entry.type.typeobj_cname
if name:
if entry.visibility == 'extern' and not entry.in_cinclude:
code.putln("%s %s %s;" % (
Naming.extern_c_macro,
PyrexTypes.public_decl("PyTypeObject", "DL_IMPORT"),
name))
elif entry.visibility == 'public':
code.putln("%s %s %s;" % (
Naming.extern_c_macro,
PyrexTypes.public_decl("PyTypeObject", "DL_EXPORT"),
name))
# ??? Do we really need the rest of this? ???
#else:
# code.putln("static PyTypeObject %s;" % name)
def generate_exttype_vtable_struct(self, entry, code):
if not entry.used:
return
code.mark_pos(entry.pos)
# Generate struct declaration for an extension type's vtable.
type = entry.type
scope = type.scope
self.specialize_fused_types(scope)
if type.vtabstruct_cname:
code.putln("")
code.putln("struct %s {" % type.vtabstruct_cname)
if type.base_type and type.base_type.vtabstruct_cname:
code.putln("struct %s %s;" % (
type.base_type.vtabstruct_cname,
Naming.obj_base_cname))
for method_entry in scope.cfunc_entries:
if not method_entry.is_inherited:
code.putln("%s;" % method_entry.type.declaration_code("(*%s)" % method_entry.cname))
code.putln("};")
def generate_exttype_vtabptr_declaration(self, entry, code):
if not entry.used:
return
code.mark_pos(entry.pos)
# Generate declaration of pointer to an extension type's vtable.
type = entry.type
if type.vtabptr_cname:
code.putln("static struct %s *%s;" % (
type.vtabstruct_cname,
type.vtabptr_cname))
def generate_exttype_final_methods_declaration(self, entry, code):
if not entry.used:
return
code.mark_pos(entry.pos)
# Generate final methods prototypes
type = entry.type
for method_entry in entry.type.scope.cfunc_entries:
if not method_entry.is_inherited and method_entry.final_func_cname:
declaration = method_entry.type.declaration_code(
method_entry.final_func_cname)
modifiers = code.build_function_modifiers(method_entry.func_modifiers)
code.putln("static %s%s;" % (modifiers, declaration))
def generate_objstruct_predeclaration(self, type, code):
if not type.scope:
return
code.putln(self.sue_predeclaration(type, "struct", type.objstruct_cname))
def generate_objstruct_definition(self, type, code):
code.mark_pos(type.pos)
# Generate object struct definition for an
# extension type.
if not type.scope:
return # Forward declared but never defined
header, footer = \
self.sue_header_footer(type, "struct", type.objstruct_cname)
code.putln(header)
base_type = type.base_type
if base_type:
basestruct_cname = base_type.objstruct_cname
if basestruct_cname == "PyTypeObject":
# User-defined subclasses of type are heap allocated.
basestruct_cname = "PyHeapTypeObject"
code.putln(
"%s%s %s;" % (
("struct ", "")[base_type.typedef_flag],
basestruct_cname,
Naming.obj_base_cname))
else:
code.putln(
"PyObject_HEAD")
if type.vtabslot_cname and not (type.base_type and type.base_type.vtabslot_cname):
code.putln(
"struct %s *%s;" % (
type.vtabstruct_cname,
type.vtabslot_cname))
for attr in type.scope.var_entries:
if attr.is_declared_generic:
attr_type = py_object_type
else:
attr_type = attr.type
code.putln(
"%s;" % attr_type.declaration_code(attr.cname))
code.putln(footer)
if type.objtypedef_cname is not None:
# Only for exposing public typedef name.
code.putln("typedef struct %s %s;" % (type.objstruct_cname, type.objtypedef_cname))
def generate_c_class_declarations(self, env, code, definition):
for entry in env.c_class_entries:
if definition or entry.defined_in_pxd:
code.putln("static PyTypeObject *%s = 0;" % (
entry.type.typeptr_cname))
def generate_cvariable_declarations(self, env, code, definition):
if env.is_cython_builtin:
return
for entry in env.var_entries:
if (entry.in_cinclude or entry.in_closure or
(entry.visibility == 'private' and not (entry.defined_in_pxd or entry.used))):
continue
storage_class = None
dll_linkage = None
init = None
if entry.visibility == 'extern':
storage_class = Naming.extern_c_macro
dll_linkage = "DL_IMPORT"
elif entry.visibility == 'public':
storage_class = Naming.extern_c_macro
if definition:
dll_linkage = "DL_EXPORT"
else:
dll_linkage = "DL_IMPORT"
elif entry.visibility == 'private':
storage_class = "static"
dll_linkage = None
if entry.init is not None:
init = entry.type.literal_code(entry.init)
type = entry.type
cname = entry.cname
if entry.defined_in_pxd and not definition:
storage_class = "static"
dll_linkage = None
type = CPtrType(type)
cname = env.mangle(Naming.varptr_prefix, entry.name)
init = 0
if storage_class:
code.put("%s " % storage_class)
code.put(type.declaration_code(
cname, dll_linkage=dll_linkage))
if init is not None:
code.put_safe(" = %s" % init)
code.putln(";")
if entry.cname != cname:
code.putln("#define %s (*%s)" % (entry.cname, cname))
def generate_cfunction_declarations(self, env, code, definition):
for entry in env.cfunc_entries:
if entry.used or (entry.visibility == 'public' or entry.api):
generate_cfunction_declaration(entry, env, code, definition)
def generate_variable_definitions(self, env, code):
for entry in env.var_entries:
if not entry.in_cinclude and entry.visibility == "public":
code.put(entry.type.declaration_code(entry.cname))
if entry.init is not None:
init = entry.type.literal_code(entry.init)
code.put_safe(" = %s" % init)
code.putln(";")
def generate_typeobj_definitions(self, env, code):
full_module_name = env.qualified_name
for entry in env.c_class_entries:
#print "generate_typeobj_definitions:", entry.name
#print "...visibility =", entry.visibility
if entry.visibility != 'extern':
type = entry.type
scope = type.scope
if scope: # could be None if there was an error
self.generate_exttype_vtable(scope, code)
self.generate_new_function(scope, code, entry)
self.generate_dealloc_function(scope, code)
if scope.needs_gc():
self.generate_traverse_function(scope, code, entry)
if scope.needs_tp_clear():
self.generate_clear_function(scope, code, entry)
if scope.defines_any(["__getitem__"]):
self.generate_getitem_int_function(scope, code)
if scope.defines_any(["__setitem__", "__delitem__"]):
self.generate_ass_subscript_function(scope, code)
if scope.defines_any(["__getslice__", "__setslice__", "__delslice__"]):
warning(self.pos,
"__getslice__, __setslice__, and __delslice__ are not supported by Python 3, "
"use __getitem__, __setitem__, and __delitem__ instead", 1)
code.putln("#if PY_MAJOR_VERSION >= 3")
code.putln("#error __getslice__, __setslice__, and __delslice__ not supported in Python 3.")
code.putln("#endif")
if scope.defines_any(["__setslice__", "__delslice__"]):
self.generate_ass_slice_function(scope, code)
if scope.defines_any(["__getattr__", "__getattribute__"]):
self.generate_getattro_function(scope, code)
if scope.defines_any(["__setattr__", "__delattr__"]):
self.generate_setattro_function(scope, code)
if scope.defines_any(["__get__"]):
self.generate_descr_get_function(scope, code)
if scope.defines_any(["__set__", "__delete__"]):
self.generate_descr_set_function(scope, code)
self.generate_property_accessors(scope, code)
self.generate_method_table(scope, code)
self.generate_getset_table(scope, code)
self.generate_typeobj_definition(full_module_name, entry, code)
def generate_exttype_vtable(self, scope, code):
# Generate the definition of an extension type's vtable.
type = scope.parent_type
if type.vtable_cname:
code.putln("static struct %s %s;" % (
type.vtabstruct_cname,
type.vtable_cname))
def generate_self_cast(self, scope, code):
type = scope.parent_type
code.putln(
"%s = (%s)o;" % (
type.declaration_code("p"),
type.empty_declaration_code()))
def generate_new_function(self, scope, code, cclass_entry):
tp_slot = TypeSlots.ConstructorSlot("tp_new", '__new__')
slot_func = scope.mangle_internal("tp_new")
type = scope.parent_type
base_type = type.base_type
have_entries, (py_attrs, py_buffers, memoryview_slices) = \
scope.get_refcounted_entries()
is_final_type = scope.parent_type.is_final_type
if scope.is_internal:
            # internal classes (should) never need None inits; normal zeroing will do
py_attrs = []
cpp_class_attrs = [entry for entry in scope.var_entries
if entry.type.is_cpp_class]
new_func_entry = scope.lookup_here("__new__")
if base_type or (new_func_entry and new_func_entry.is_special
and not new_func_entry.trivial_signature):
unused_marker = ''
else:
unused_marker = 'CYTHON_UNUSED '
if base_type:
freelist_size = 0 # not currently supported
else:
freelist_size = scope.directives.get('freelist', 0)
freelist_name = scope.mangle_internal(Naming.freelist_name)
freecount_name = scope.mangle_internal(Naming.freecount_name)
decls = code.globalstate['decls']
decls.putln("static PyObject *%s(PyTypeObject *t, PyObject *a, PyObject *k); /*proto*/" %
slot_func)
code.putln("")
if freelist_size:
code.putln("static %s[%d];" % (
scope.parent_type.declaration_code(freelist_name),
freelist_size))
code.putln("static int %s = 0;" % freecount_name)
code.putln("")
code.putln(
"static PyObject *%s(PyTypeObject *t, %sPyObject *a, %sPyObject *k) {" % (
slot_func, unused_marker, unused_marker))
need_self_cast = (type.vtabslot_cname or
(py_buffers or memoryview_slices or py_attrs) or
cpp_class_attrs)
if need_self_cast:
code.putln("%s;" % scope.parent_type.declaration_code("p"))
if base_type:
tp_new = TypeSlots.get_base_slot_function(scope, tp_slot)
if tp_new is None:
tp_new = "%s->tp_new" % base_type.typeptr_cname
code.putln("PyObject *o = %s(t, a, k);" % tp_new)
else:
code.putln("PyObject *o;")
if freelist_size:
code.globalstate.use_utility_code(
UtilityCode.load_cached("IncludeStringH", "StringTools.c"))
if is_final_type:
type_safety_check = ''
else:
type_safety_check = ' & ((t->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0)'
obj_struct = type.declaration_code("", deref=True)
code.putln(
"if (CYTHON_COMPILING_IN_CPYTHON && likely((%s > 0) & (t->tp_basicsize == sizeof(%s))%s)) {" % (
freecount_name, obj_struct, type_safety_check))
code.putln("o = (PyObject*)%s[--%s];" % (
freelist_name, freecount_name))
code.putln("memset(o, 0, sizeof(%s));" % obj_struct)
code.putln("(void) PyObject_INIT(o, t);")
if scope.needs_gc():
code.putln("PyObject_GC_Track(o);")
code.putln("} else {")
if not is_final_type:
code.putln("if (likely((t->tp_flags & Py_TPFLAGS_IS_ABSTRACT) == 0)) {")
code.putln("o = (*t->tp_alloc)(t, 0);")
if not is_final_type:
code.putln("} else {")
code.putln("o = (PyObject *) PyBaseObject_Type.tp_new(t, %s, 0);" % Naming.empty_tuple)
code.putln("}")
code.putln("if (unlikely(!o)) return 0;")
if freelist_size and not base_type:
code.putln('}')
if need_self_cast:
code.putln("p = %s;" % type.cast_code("o"))
#if need_self_cast:
# self.generate_self_cast(scope, code)
if type.vtabslot_cname:
vtab_base_type = type
while vtab_base_type.base_type and vtab_base_type.base_type.vtabstruct_cname:
vtab_base_type = vtab_base_type.base_type
if vtab_base_type is not type:
struct_type_cast = "(struct %s*)" % vtab_base_type.vtabstruct_cname
else:
struct_type_cast = ""
code.putln("p->%s = %s%s;" % (
type.vtabslot_cname,
struct_type_cast, type.vtabptr_cname))
for entry in cpp_class_attrs:
code.putln("new((void*)&(p->%s)) %s();" %
(entry.cname, entry.type.empty_declaration_code()))
for entry in py_attrs:
code.put_init_var_to_py_none(entry, "p->%s", nanny=False)
for entry in memoryview_slices:
code.putln("p->%s.data = NULL;" % entry.cname)
code.putln("p->%s.memview = NULL;" % entry.cname)
for entry in py_buffers:
code.putln("p->%s.obj = NULL;" % entry.cname)
if cclass_entry.cname == '__pyx_memoryviewslice':
code.putln("p->from_slice.memview = NULL;")
if new_func_entry and new_func_entry.is_special:
if new_func_entry.trivial_signature:
cinit_args = "o, %s, NULL" % Naming.empty_tuple
else:
cinit_args = "o, a, k"
code.putln(
"if (unlikely(%s(%s) < 0)) {" % (
new_func_entry.func_cname, cinit_args))
code.put_decref_clear("o", py_object_type, nanny=False)
code.putln(
"}")
code.putln(
"return o;")
code.putln(
"}")
def generate_dealloc_function(self, scope, code):
tp_slot = TypeSlots.ConstructorSlot("tp_dealloc", '__dealloc__')
slot_func = scope.mangle_internal("tp_dealloc")
base_type = scope.parent_type.base_type
if tp_slot.slot_code(scope) != slot_func:
return # never used
slot_func_cname = scope.mangle_internal("tp_dealloc")
code.putln("")
code.putln(
"static void %s(PyObject *o) {" % slot_func_cname)
is_final_type = scope.parent_type.is_final_type
needs_gc = scope.needs_gc()
weakref_slot = scope.lookup_here("__weakref__")
if weakref_slot not in scope.var_entries:
weakref_slot = None
_, (py_attrs, _, memoryview_slices) = scope.get_refcounted_entries()
cpp_class_attrs = [entry for entry in scope.var_entries
if entry.type.is_cpp_class]
if py_attrs or cpp_class_attrs or memoryview_slices or weakref_slot:
self.generate_self_cast(scope, code)
if not is_final_type:
# in Py3.4+, call tp_finalize() as early as possible
code.putln("#if PY_VERSION_HEX >= 0x030400a1")
if needs_gc:
finalised_check = '!_PyGC_FINALIZED(o)'
else:
finalised_check = (
'(!PyType_IS_GC(Py_TYPE(o)) || !_PyGC_FINALIZED(o))')
code.putln("if (unlikely(Py_TYPE(o)->tp_finalize) && %s) {" %
finalised_check)
# if instance was resurrected by finaliser, return
code.putln("if (PyObject_CallFinalizerFromDealloc(o)) return;")
code.putln("}")
code.putln("#endif")
if needs_gc:
# We must mark this object as (gc) untracked while tearing
            # it down, lest garbage collection be invoked while
# running this destructor.
code.putln("PyObject_GC_UnTrack(o);")
# call the user's __dealloc__
self.generate_usr_dealloc_call(scope, code)
if weakref_slot:
code.putln("if (p->__weakref__) PyObject_ClearWeakRefs(o);")
for entry in cpp_class_attrs:
code.putln("__Pyx_call_destructor(p->%s);" % entry.cname)
for entry in py_attrs:
code.put_xdecref_clear("p->%s" % entry.cname, entry.type, nanny=False,
clear_before_decref=True)
for entry in memoryview_slices:
code.put_xdecref_memoryviewslice("p->%s" % entry.cname,
have_gil=True)
if base_type:
if needs_gc:
# The base class deallocator probably expects this to be tracked,
# so undo the untracking above.
if base_type.scope and base_type.scope.needs_gc():
code.putln("PyObject_GC_Track(o);")
else:
code.putln("#if CYTHON_COMPILING_IN_CPYTHON")
code.putln("if (PyType_IS_GC(Py_TYPE(o)->tp_base))")
code.putln("#endif")
code.putln("PyObject_GC_Track(o);")
tp_dealloc = TypeSlots.get_base_slot_function(scope, tp_slot)
if tp_dealloc is not None:
code.putln("%s(o);" % tp_dealloc)
elif base_type.is_builtin_type:
code.putln("%s->tp_dealloc(o);" % base_type.typeptr_cname)
else:
# This is an externally defined type. Calling through the
# cimported base type pointer directly interacts badly with
# the module cleanup, which may already have cleared it.
# In that case, fall back to traversing the type hierarchy.
base_cname = base_type.typeptr_cname
code.putln("if (likely(%s)) %s->tp_dealloc(o); "
"else __Pyx_call_next_tp_dealloc(o, %s);" % (
base_cname, base_cname, slot_func_cname))
code.globalstate.use_utility_code(
UtilityCode.load_cached("CallNextTpDealloc", "ExtensionTypes.c"))
else:
freelist_size = scope.directives.get('freelist', 0)
if freelist_size:
freelist_name = scope.mangle_internal(Naming.freelist_name)
freecount_name = scope.mangle_internal(Naming.freecount_name)
if is_final_type:
type_safety_check = ''
else:
type_safety_check = (
' & ((Py_TYPE(o)->tp_flags & (Py_TPFLAGS_IS_ABSTRACT | Py_TPFLAGS_HEAPTYPE)) == 0)')
type = scope.parent_type
code.putln(
"if (CYTHON_COMPILING_IN_CPYTHON && ((%s < %d) & (Py_TYPE(o)->tp_basicsize == sizeof(%s))%s)) {" % (
freecount_name,
freelist_size,
type.declaration_code("", deref=True),
type_safety_check))
code.putln("%s[%s++] = %s;" % (
freelist_name, freecount_name, type.cast_code("o")))
code.putln("} else {")
code.putln("(*Py_TYPE(o)->tp_free)(o);")
if freelist_size:
code.putln("}")
code.putln(
"}")
def generate_usr_dealloc_call(self, scope, code):
entry = scope.lookup_here("__dealloc__")
if not entry:
return
code.putln("{")
code.putln("PyObject *etype, *eval, *etb;")
code.putln("PyErr_Fetch(&etype, &eval, &etb);")
code.putln("++Py_REFCNT(o);")
code.putln("%s(o);" % entry.func_cname)
code.putln("--Py_REFCNT(o);")
code.putln("PyErr_Restore(etype, eval, etb);")
code.putln("}")
def generate_traverse_function(self, scope, code, cclass_entry):
tp_slot = TypeSlots.GCDependentSlot("tp_traverse")
slot_func = scope.mangle_internal("tp_traverse")
base_type = scope.parent_type.base_type
if tp_slot.slot_code(scope) != slot_func:
return # never used
code.putln("")
code.putln(
"static int %s(PyObject *o, visitproc v, void *a) {" % slot_func)
have_entries, (py_attrs, py_buffers, memoryview_slices) = (
scope.get_refcounted_entries(include_gc_simple=False))
if base_type or py_attrs:
code.putln("int e;")
if py_attrs or py_buffers:
self.generate_self_cast(scope, code)
if base_type:
# want to call it explicitly if possible so inlining can be performed
static_call = TypeSlots.get_base_slot_function(scope, tp_slot)
if static_call:
code.putln("e = %s(o, v, a); if (e) return e;" % static_call)
elif base_type.is_builtin_type:
base_cname = base_type.typeptr_cname
code.putln("if (!%s->tp_traverse); else { e = %s->tp_traverse(o,v,a); if (e) return e; }" % (
base_cname, base_cname))
else:
# This is an externally defined type. Calling through the
# cimported base type pointer directly interacts badly with
# the module cleanup, which may already have cleared it.
# In that case, fall back to traversing the type hierarchy.
base_cname = base_type.typeptr_cname
code.putln(
"e = ((likely(%s)) ? ((%s->tp_traverse) ? %s->tp_traverse(o, v, a) : 0) : "
"__Pyx_call_next_tp_traverse(o, v, a, %s)); if (e) return e;" % (
base_cname, base_cname, base_cname, slot_func))
code.globalstate.use_utility_code(
UtilityCode.load_cached("CallNextTpTraverse", "ExtensionTypes.c"))
for entry in py_attrs:
var_code = "p->%s" % entry.cname
code.putln("if (%s) {" % var_code)
if entry.type.is_extension_type:
var_code = "((PyObject*)%s)" % var_code
code.putln("e = (*v)(%s, a); if (e) return e;" % var_code)
code.putln("}")
# Traverse buffer exporting objects.
# Note: not traversing memoryview attributes of memoryview slices!
        # When triggered by the GC, this would cause multiple visits (gc_refs
        # subtractions that are not matched by the object's reference count!)
for entry in py_buffers:
cname = entry.cname + ".obj"
code.putln("if (p->%s) {" % cname)
code.putln("e = (*v)(p->%s, a); if (e) return e;" % cname)
code.putln("}")
code.putln("return 0;")
code.putln("}")
def generate_clear_function(self, scope, code, cclass_entry):
tp_slot = TypeSlots.GCDependentSlot("tp_clear")
slot_func = scope.mangle_internal("tp_clear")
base_type = scope.parent_type.base_type
if tp_slot.slot_code(scope) != slot_func:
return # never used
have_entries, (py_attrs, py_buffers, memoryview_slices) = (
scope.get_refcounted_entries(include_gc_simple=False))
if py_attrs or py_buffers or base_type:
unused = ''
else:
unused = 'CYTHON_UNUSED '
code.putln("")
code.putln("static int %s(%sPyObject *o) {" % (slot_func, unused))
if py_attrs and Options.clear_to_none:
code.putln("PyObject* tmp;")
if py_attrs or py_buffers:
self.generate_self_cast(scope, code)
if base_type:
# want to call it explicitly if possible so inlining can be performed
static_call = TypeSlots.get_base_slot_function(scope, tp_slot)
if static_call:
code.putln("%s(o);" % static_call)
elif base_type.is_builtin_type:
base_cname = base_type.typeptr_cname
code.putln("if (!%s->tp_clear); else %s->tp_clear(o);" % (
base_cname, base_cname))
else:
# This is an externally defined type. Calling through the
# cimported base type pointer directly interacts badly with
# the module cleanup, which may already have cleared it.
# In that case, fall back to traversing the type hierarchy.
base_cname = base_type.typeptr_cname
code.putln(
"if (likely(%s)) { if (%s->tp_clear) %s->tp_clear(o); } else __Pyx_call_next_tp_clear(o, %s);" % (
base_cname, base_cname, base_cname, slot_func))
code.globalstate.use_utility_code(
UtilityCode.load_cached("CallNextTpClear", "ExtensionTypes.c"))
if Options.clear_to_none:
for entry in py_attrs:
name = "p->%s" % entry.cname
code.putln("tmp = ((PyObject*)%s);" % name)
if entry.is_declared_generic:
code.put_init_to_py_none(name, py_object_type, nanny=False)
else:
code.put_init_to_py_none(name, entry.type, nanny=False)
code.putln("Py_XDECREF(tmp);")
else:
for entry in py_attrs:
code.putln("Py_CLEAR(p->%s);" % entry.cname)
for entry in py_buffers:
# Note: shouldn't this call __Pyx_ReleaseBuffer ??
code.putln("Py_CLEAR(p->%s.obj);" % entry.cname)
if cclass_entry.cname == '__pyx_memoryviewslice':
code.putln("__PYX_XDEC_MEMVIEW(&p->from_slice, 1);")
code.putln("return 0;")
code.putln("}")
def generate_getitem_int_function(self, scope, code):
# This function is put into the sq_item slot when
# a __getitem__ method is present. It converts its
# argument to a Python integer and calls mp_subscript.
code.putln(
"static PyObject *%s(PyObject *o, Py_ssize_t i) {" % (
scope.mangle_internal("sq_item")))
code.putln(
"PyObject *r;")
code.putln(
"PyObject *x = PyInt_FromSsize_t(i); if(!x) return 0;")
code.putln(
"r = Py_TYPE(o)->tp_as_mapping->mp_subscript(o, x);")
code.putln(
"Py_DECREF(x);")
code.putln(
"return r;")
code.putln(
"}")
def generate_ass_subscript_function(self, scope, code):
# Setting and deleting an item are both done through
# the ass_subscript method, so we dispatch to user's __setitem__
# or __delitem__, or raise an exception.
base_type = scope.parent_type.base_type
set_entry = scope.lookup_here("__setitem__")
del_entry = scope.lookup_here("__delitem__")
code.putln("")
code.putln(
"static int %s(PyObject *o, PyObject *i, PyObject *v) {" % (
scope.mangle_internal("mp_ass_subscript")))
code.putln(
"if (v) {")
if set_entry:
code.putln("return %s(o, i, v);" % set_entry.func_cname)
else:
self.generate_guarded_basetype_call(
base_type, "tp_as_mapping", "mp_ass_subscript", "o, i, v", code)
code.putln(
"PyErr_Format(PyExc_NotImplementedError,")
code.putln(
' "Subscript assignment not supported by %.200s", Py_TYPE(o)->tp_name);')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"else {")
if del_entry:
code.putln(
"return %s(o, i);" % (
del_entry.func_cname))
else:
self.generate_guarded_basetype_call(
base_type, "tp_as_mapping", "mp_ass_subscript", "o, i, v", code)
code.putln(
"PyErr_Format(PyExc_NotImplementedError,")
code.putln(
' "Subscript deletion not supported by %.200s", Py_TYPE(o)->tp_name);')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"}")
def generate_guarded_basetype_call(
self, base_type, substructure, slot, args, code):
if base_type:
base_tpname = base_type.typeptr_cname
if substructure:
code.putln(
"if (%s->%s && %s->%s->%s)" % (
base_tpname, substructure, base_tpname, substructure, slot))
code.putln(
" return %s->%s->%s(%s);" % (
base_tpname, substructure, slot, args))
else:
code.putln(
"if (%s->%s)" % (
base_tpname, slot))
code.putln(
" return %s->%s(%s);" % (
base_tpname, slot, args))
def generate_ass_slice_function(self, scope, code):
# Setting and deleting a slice are both done through
# the ass_slice method, so we dispatch to user's __setslice__
# or __delslice__, or raise an exception.
base_type = scope.parent_type.base_type
set_entry = scope.lookup_here("__setslice__")
del_entry = scope.lookup_here("__delslice__")
code.putln("")
code.putln(
"static int %s(PyObject *o, Py_ssize_t i, Py_ssize_t j, PyObject *v) {" % (
scope.mangle_internal("sq_ass_slice")))
code.putln(
"if (v) {")
if set_entry:
code.putln(
"return %s(o, i, j, v);" % (
set_entry.func_cname))
else:
self.generate_guarded_basetype_call(
base_type, "tp_as_sequence", "sq_ass_slice", "o, i, j, v", code)
code.putln(
"PyErr_Format(PyExc_NotImplementedError,")
code.putln(
' "2-element slice assignment not supported by %.200s", Py_TYPE(o)->tp_name);')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"else {")
if del_entry:
code.putln(
"return %s(o, i, j);" % (
del_entry.func_cname))
else:
self.generate_guarded_basetype_call(
base_type, "tp_as_sequence", "sq_ass_slice", "o, i, j, v", code)
code.putln(
"PyErr_Format(PyExc_NotImplementedError,")
code.putln(
' "2-element slice deletion not supported by %.200s", Py_TYPE(o)->tp_name);')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"}")
def generate_getattro_function(self, scope, code):
# First try to get the attribute using __getattribute__, if defined, or
# PyObject_GenericGetAttr.
#
# If that raises an AttributeError, call the __getattr__ if defined.
#
# In both cases, defined can be in this class, or any base class.
def lookup_here_or_base(n, type=None):
# Recursive lookup
if type is None:
type = scope.parent_type
r = type.scope.lookup_here(n)
if r is None and \
type.base_type is not None:
return lookup_here_or_base(n, type.base_type)
else:
return r
getattr_entry = lookup_here_or_base("__getattr__")
getattribute_entry = lookup_here_or_base("__getattribute__")
code.putln("")
code.putln(
"static PyObject *%s(PyObject *o, PyObject *n) {" % (
scope.mangle_internal("tp_getattro")))
if getattribute_entry is not None:
code.putln(
"PyObject *v = %s(o, n);" % (
getattribute_entry.func_cname))
else:
code.putln(
"PyObject *v = PyObject_GenericGetAttr(o, n);")
if getattr_entry is not None:
code.putln(
"if (!v && PyErr_ExceptionMatches(PyExc_AttributeError)) {")
code.putln(
"PyErr_Clear();")
code.putln(
"v = %s(o, n);" % (
getattr_entry.func_cname))
code.putln(
"}")
code.putln(
"return v;")
code.putln(
"}")
def generate_setattro_function(self, scope, code):
# Setting and deleting an attribute are both done through
# the setattro method, so we dispatch to user's __setattr__
# or __delattr__ or fall back on PyObject_GenericSetAttr.
base_type = scope.parent_type.base_type
set_entry = scope.lookup_here("__setattr__")
del_entry = scope.lookup_here("__delattr__")
code.putln("")
code.putln(
"static int %s(PyObject *o, PyObject *n, PyObject *v) {" % (
scope.mangle_internal("tp_setattro")))
code.putln(
"if (v) {")
if set_entry:
code.putln(
"return %s(o, n, v);" % (
set_entry.func_cname))
else:
self.generate_guarded_basetype_call(
base_type, None, "tp_setattro", "o, n, v", code)
code.putln(
"return PyObject_GenericSetAttr(o, n, v);")
code.putln(
"}")
code.putln(
"else {")
if del_entry:
code.putln(
"return %s(o, n);" % (
del_entry.func_cname))
else:
self.generate_guarded_basetype_call(
base_type, None, "tp_setattro", "o, n, v", code)
code.putln(
"return PyObject_GenericSetAttr(o, n, 0);")
code.putln(
"}")
code.putln(
"}")
def generate_descr_get_function(self, scope, code):
# The __get__ function of a descriptor object can be
# called with NULL for the second or third arguments
# under some circumstances, so we replace them with
# None in that case.
user_get_entry = scope.lookup_here("__get__")
code.putln("")
code.putln(
"static PyObject *%s(PyObject *o, PyObject *i, PyObject *c) {" % (
scope.mangle_internal("tp_descr_get")))
code.putln(
"PyObject *r = 0;")
code.putln(
"if (!i) i = Py_None;")
code.putln(
"if (!c) c = Py_None;")
#code.put_incref("i", py_object_type)
#code.put_incref("c", py_object_type)
code.putln(
"r = %s(o, i, c);" % (
user_get_entry.func_cname))
#code.put_decref("i", py_object_type)
#code.put_decref("c", py_object_type)
code.putln(
"return r;")
code.putln(
"}")
def generate_descr_set_function(self, scope, code):
# Setting and deleting are both done through the __set__
# method of a descriptor, so we dispatch to user's __set__
# or __delete__ or raise an exception.
base_type = scope.parent_type.base_type
user_set_entry = scope.lookup_here("__set__")
user_del_entry = scope.lookup_here("__delete__")
code.putln("")
code.putln(
"static int %s(PyObject *o, PyObject *i, PyObject *v) {" % (
scope.mangle_internal("tp_descr_set")))
code.putln(
"if (v) {")
if user_set_entry:
code.putln(
"return %s(o, i, v);" % (
user_set_entry.func_cname))
else:
self.generate_guarded_basetype_call(
base_type, None, "tp_descr_set", "o, i, v", code)
code.putln(
'PyErr_SetString(PyExc_NotImplementedError, "__set__");')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"else {")
if user_del_entry:
code.putln(
"return %s(o, i);" % (
user_del_entry.func_cname))
else:
self.generate_guarded_basetype_call(
base_type, None, "tp_descr_set", "o, i, v", code)
code.putln(
'PyErr_SetString(PyExc_NotImplementedError, "__delete__");')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"}")
def generate_property_accessors(self, cclass_scope, code):
for entry in cclass_scope.property_entries:
property_scope = entry.scope
if property_scope.defines_any(["__get__"]):
self.generate_property_get_function(entry, code)
if property_scope.defines_any(["__set__", "__del__"]):
self.generate_property_set_function(entry, code)
def generate_property_get_function(self, property_entry, code):
property_scope = property_entry.scope
property_entry.getter_cname = property_scope.parent_scope.mangle(
Naming.prop_get_prefix, property_entry.name)
get_entry = property_scope.lookup_here("__get__")
code.putln("")
code.putln(
"static PyObject *%s(PyObject *o, CYTHON_UNUSED void *x) {" % (
property_entry.getter_cname))
code.putln(
"return %s(o);" % (
get_entry.func_cname))
code.putln(
"}")
def generate_property_set_function(self, property_entry, code):
property_scope = property_entry.scope
property_entry.setter_cname = property_scope.parent_scope.mangle(
Naming.prop_set_prefix, property_entry.name)
set_entry = property_scope.lookup_here("__set__")
del_entry = property_scope.lookup_here("__del__")
code.putln("")
code.putln(
"static int %s(PyObject *o, PyObject *v, CYTHON_UNUSED void *x) {" % (
property_entry.setter_cname))
code.putln(
"if (v) {")
if set_entry:
code.putln(
"return %s(o, v);" % (
set_entry.func_cname))
else:
code.putln(
'PyErr_SetString(PyExc_NotImplementedError, "__set__");')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"else {")
if del_entry:
code.putln(
"return %s(o);" % (
del_entry.func_cname))
else:
code.putln(
'PyErr_SetString(PyExc_NotImplementedError, "__del__");')
code.putln(
"return -1;")
code.putln(
"}")
code.putln(
"}")
def generate_typeobj_definition(self, modname, entry, code):
type = entry.type
scope = type.scope
for suite in TypeSlots.substructures:
suite.generate_substructure(scope, code)
code.putln("")
if entry.visibility == 'public':
header = "DL_EXPORT(PyTypeObject) %s = {"
else:
header = "static PyTypeObject %s = {"
#code.putln(header % scope.parent_type.typeobj_cname)
code.putln(header % type.typeobj_cname)
code.putln(
"PyVarObject_HEAD_INIT(0, 0)")
code.putln(
'"%s.%s", /*tp_name*/' % (
self.full_module_name, scope.class_name))
if type.typedef_flag:
objstruct = type.objstruct_cname
else:
objstruct = "struct %s" % type.objstruct_cname
code.putln(
"sizeof(%s), /*tp_basicsize*/" % objstruct)
code.putln(
"0, /*tp_itemsize*/")
for slot in TypeSlots.slot_table:
slot.generate(scope, code)
code.putln(
"};")
def generate_method_table(self, env, code):
if env.is_c_class_scope and not env.pyfunc_entries:
return
code.putln("")
code.putln(
"static PyMethodDef %s[] = {" % (
env.method_table_cname))
for entry in env.pyfunc_entries:
if not entry.fused_cfunction:
code.put_pymethoddef(entry, ",")
code.putln(
"{0, 0, 0, 0}")
code.putln(
"};")
def generate_getset_table(self, env, code):
if env.property_entries:
code.putln("")
code.putln(
"static struct PyGetSetDef %s[] = {" %
env.getset_table_cname)
for entry in env.property_entries:
doc = entry.doc
if doc:
if doc.is_unicode:
doc = doc.as_utf8_string()
doc_code = doc.as_c_string_literal()
else:
doc_code = "0"
code.putln(
'{(char *)"%s", %s, %s, (char *)%s, 0},' % (
entry.name,
entry.getter_cname or "0",
entry.setter_cname or "0",
doc_code))
code.putln(
"{0, 0, 0, 0, 0}")
code.putln(
"};")
def create_import_star_conversion_utility_code(self, env):
# Create all conversion helpers that are needed for "import *" assignments.
# Must be done before code generation to support CythonUtilityCode.
for name, entry in sorted(env.entries.items()):
if entry.is_cglobal and entry.used:
if not entry.type.is_pyobject:
entry.type.create_from_py_utility_code(env)
def generate_import_star(self, env, code):
env.use_utility_code(UtilityCode.load_cached("CStringEquals", "StringTools.c"))
code.putln()
code.enter_cfunc_scope() # as we need labels
code.putln("static int %s(PyObject *o, PyObject* py_name, char *name) {" % Naming.import_star_set)
code.putln("static const char* internal_type_names[] = {")
for name, entry in sorted(env.entries.items()):
if entry.is_type:
code.putln('"%s",' % name)
code.putln("0")
code.putln("};")
code.putln("const char** type_name = internal_type_names;")
code.putln("while (*type_name) {")
code.putln("if (__Pyx_StrEq(name, *type_name)) {")
code.putln('PyErr_Format(PyExc_TypeError, "Cannot overwrite C type %s", name);')
code.putln('goto bad;')
code.putln("}")
code.putln("type_name++;")
code.putln("}")
old_error_label = code.new_error_label()
code.putln("if (0);") # so the first one can be "else if"
msvc_count = 0
for name, entry in sorted(env.entries.items()):
if entry.is_cglobal and entry.used:
msvc_count += 1
if msvc_count % 100 == 0:
code.putln("#ifdef _MSC_VER")
code.putln("if (0); /* Workaround for MSVC C1061. */")
code.putln("#endif")
code.putln('else if (__Pyx_StrEq(name, "%s")) {' % name)
if entry.type.is_pyobject:
if entry.type.is_extension_type or entry.type.is_builtin_type:
code.putln("if (!(%s)) %s;" % (
entry.type.type_test_code("o"),
code.error_goto(entry.pos)))
code.putln("Py_INCREF(o);")
code.put_decref(entry.cname, entry.type, nanny=False)
code.putln("%s = %s;" % (
entry.cname,
PyrexTypes.typecast(entry.type, py_object_type, "o")))
elif entry.type.create_from_py_utility_code(env):
# if available, utility code was already created in self.prepare_utility_code()
code.putln(entry.type.from_py_call_code(
'o', entry.cname, entry.pos, code))
else:
code.putln('PyErr_Format(PyExc_TypeError, "Cannot convert Python object %s to %s");' % (
name, entry.type))
code.putln(code.error_goto(entry.pos))
code.putln("}")
code.putln("else {")
code.putln("if (PyObject_SetAttr(%s, py_name, o) < 0) goto bad;" % Naming.module_cname)
code.putln("}")
code.putln("return 0;")
if code.label_used(code.error_label):
code.put_label(code.error_label)
# This helps locate the offending name.
code.put_add_traceback(self.full_module_name)
code.error_label = old_error_label
code.putln("bad:")
code.putln("return -1;")
code.putln("}")
code.putln("")
code.putln(UtilityCode.load_cached("ImportStar", "ImportExport.c").impl)
code.exit_cfunc_scope() # done with labels
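# The function emitted above (named via Naming.import_star_set) is the
# per-name callback used by the "ImportStar" utility code: it refuses to
# overwrite C-typed names, converts values assigned to typed C globals,
# and falls back to a plain attribute assignment on the module object.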
def generate_module_init_func(self, imported_modules, env, code):
code.enter_cfunc_scope()
code.putln("")
header2 = "PyMODINIT_FUNC init%s(void)" % env.module_name
header3 = "PyMODINIT_FUNC PyInit_%s(void)" % env.module_name
code.putln("#if PY_MAJOR_VERSION < 3")
code.putln("%s; /*proto*/" % header2)
code.putln(header2)
code.putln("#else")
code.putln("%s; /*proto*/" % header3)
code.putln(header3)
code.putln("#endif")
code.putln("{")
tempdecl_code = code.insertion_point()
profile = code.globalstate.directives['profile']
linetrace = code.globalstate.directives['linetrace']
if profile or linetrace:
code.globalstate.use_utility_code(UtilityCode.load_cached("Profile", "Profile.c"))
code.put_declare_refcount_context()
if profile or linetrace:
tempdecl_code.put_trace_declarations()
code.put_trace_frame_init()
code.putln("#if CYTHON_REFNANNY")
code.putln("__Pyx_RefNanny = __Pyx_RefNannyImportAPI(\"refnanny\");")
code.putln("if (!__Pyx_RefNanny) {")
code.putln(" PyErr_Clear();")
code.putln(" __Pyx_RefNanny = __Pyx_RefNannyImportAPI(\"Cython.Runtime.refnanny\");")
code.putln(" if (!__Pyx_RefNanny)")
code.putln(" Py_FatalError(\"failed to import 'refnanny' module\");")
code.putln("}")
code.putln("#endif")
code.put_setup_refcount_context(header3)
env.use_utility_code(UtilityCode.load("CheckBinaryVersion", "ModuleSetupCode.c"))
code.put_error_if_neg(self.pos, "__Pyx_check_binary_version()")
code.putln("%s = PyTuple_New(0); %s" % (
Naming.empty_tuple, code.error_goto_if_null(Naming.empty_tuple, self.pos)))
code.putln("%s = PyBytes_FromStringAndSize(\"\", 0); %s" % (
Naming.empty_bytes, code.error_goto_if_null(Naming.empty_bytes, self.pos)))
code.putln("%s = PyUnicode_FromStringAndSize(\"\", 0); %s" % (
Naming.empty_unicode, code.error_goto_if_null(Naming.empty_unicode, self.pos)))
for ext_type in ('CyFunction', 'FusedFunction', 'Coroutine', 'Generator', 'StopAsyncIteration'):
code.putln("#ifdef __Pyx_%s_USED" % ext_type)
code.put_error_if_neg(self.pos, "__pyx_%s_init()" % ext_type)
code.putln("#endif")
code.putln("/*--- Library function declarations ---*/")
env.generate_library_function_declarations(code)
code.putln("/*--- Threads initialization code ---*/")
code.putln("#if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS")
code.putln("#ifdef WITH_THREAD /* Python build with threading support? */")
code.putln("PyEval_InitThreads();")
code.putln("#endif")
code.putln("#endif")
code.putln("/*--- Module creation code ---*/")
self.generate_module_creation_code(env, code)
code.putln("/*--- Initialize various global constants etc. ---*/")
code.put_error_if_neg(self.pos, "__Pyx_InitGlobals()")
code.putln("#if PY_MAJOR_VERSION < 3 && (__PYX_DEFAULT_STRING_ENCODING_IS_ASCII || "
"__PYX_DEFAULT_STRING_ENCODING_IS_DEFAULT)")
code.put_error_if_neg(self.pos, "__Pyx_init_sys_getdefaultencoding_params()")
code.putln("#endif")
__main__name = code.globalstate.get_py_string_const(
EncodedString("__main__"), identifier=True)
code.putln("if (%s%s) {" % (Naming.module_is_main, self.full_module_name.replace('.', '__')))
code.put_error_if_neg(self.pos, 'PyObject_SetAttrString(%s, "__name__", %s)' % (
env.module_cname,
__main__name.cname))
code.putln("}")
# set up __file__ and __path__, then add the module to sys.modules
self.generate_module_import_setup(env, code)
if Options.cache_builtins:
code.putln("/*--- Builtin init code ---*/")
code.put_error_if_neg(self.pos, "__Pyx_InitCachedBuiltins()")
code.putln("/*--- Constants init code ---*/")
code.put_error_if_neg(self.pos, "__Pyx_InitCachedConstants()")
code.putln("/*--- Global init code ---*/")
self.generate_global_init_code(env, code)
code.putln("/*--- Variable export code ---*/")
self.generate_c_variable_export_code(env, code)
code.putln("/*--- Function export code ---*/")
self.generate_c_function_export_code(env, code)
code.putln("/*--- Type init code ---*/")
self.generate_type_init_code(env, code)
code.putln("/*--- Type import code ---*/")
for module in imported_modules:
self.generate_type_import_code_for_module(module, env, code)
code.putln("/*--- Variable import code ---*/")
for module in imported_modules:
self.generate_c_variable_import_code_for_module(module, env, code)
code.putln("/*--- Function import code ---*/")
for module in imported_modules:
self.specialize_fused_types(module)
self.generate_c_function_import_code_for_module(module, env, code)
code.putln("/*--- Execution code ---*/")
code.mark_pos(None)
code.putln("#if defined(__Pyx_Generator_USED) || defined(__Pyx_Coroutine_USED)")
code.put_error_if_neg(self.pos, "__Pyx_patch_abc()")
code.putln("#endif")
if profile or linetrace:
code.put_trace_call(header3, self.pos, nogil=not code.funcstate.gil_owned)
code.funcstate.can_trace = True
self.body.generate_execution_code(code)
if profile or linetrace:
code.funcstate.can_trace = False
code.put_trace_return("Py_None", nogil=not code.funcstate.gil_owned)
code.putln()
code.putln("/*--- Wrapped vars code ---*/")
self.generate_wrapped_entries_code(env, code)
code.putln()
if Options.generate_cleanup_code:
code.globalstate.use_utility_code(
UtilityCode.load_cached("RegisterModuleCleanup", "ModuleSetupCode.c"))
code.putln("if (__Pyx_RegisterCleanup()) %s;" % code.error_goto(self.pos))
code.put_goto(code.return_label)
code.put_label(code.error_label)
for cname, type in code.funcstate.all_managed_temps():
code.put_xdecref(cname, type)
code.putln('if (%s) {' % env.module_cname)
code.putln('if (%s) {' % env.module_dict_cname)
code.put_add_traceback("init %s" % env.qualified_name)
code.globalstate.use_utility_code(Nodes.traceback_utility_code)
# Module reference and module dict are in global variables which might still be needed
# for cleanup, atexit code, etc., so leaking is better than crashing.
# At least clearing the module dict here might be a good idea, but could still break
# user code in atexit or other global registries.
##code.put_decref_clear(env.module_dict_cname, py_object_type, nanny=False)
code.putln('}')
code.put_decref_clear(env.module_cname, py_object_type, nanny=False)
code.putln('} else if (!PyErr_Occurred()) {')
code.putln('PyErr_SetString(PyExc_ImportError, "init %s");' % env.qualified_name)
code.putln('}')
code.put_label(code.return_label)
code.put_finish_refcount_context()
code.putln("#if PY_MAJOR_VERSION < 3")
code.putln("return;")
code.putln("#else")
code.putln("return %s;" % env.module_cname)
code.putln("#endif")
code.putln('}')
tempdecl_code.put_temp_declarations(code.funcstate)
code.exit_cfunc_scope()
def generate_module_import_setup(self, env, code):
module_path = env.directives['set_initial_path']
if module_path == 'SOURCEFILE':
module_path = self.pos[0].filename
if module_path:
code.putln('if (PyObject_SetAttrString(%s, "__file__", %s) < 0) %s;' % (
env.module_cname,
code.globalstate.get_py_string_const(
EncodedString(decode_filename(module_path))).cname,
code.error_goto(self.pos)))
if env.is_package:
# set __path__ to mark the module as package
temp = code.funcstate.allocate_temp(py_object_type, True)
code.putln('%s = Py_BuildValue("[O]", %s); %s' % (
temp,
code.globalstate.get_py_string_const(
EncodedString(decode_filename(
os.path.dirname(module_path)))).cname,
code.error_goto_if_null(temp, self.pos)))
code.put_gotref(temp)
code.putln(
'if (PyObject_SetAttrString(%s, "__path__", %s) < 0) %s;' % (
env.module_cname, temp, code.error_goto(self.pos)))
code.put_decref_clear(temp, py_object_type)
code.funcstate.release_temp(temp)
elif env.is_package:
# packages require __path__, so all we can do is try to figure
# out the module path at runtime by rerunning the import lookup
package_name, _ = self.full_module_name.rsplit('.', 1)
if '.' in package_name:
parent_name = '"%s"' % (package_name.rsplit('.', 1)[0],)
else:
parent_name = 'NULL'
code.globalstate.use_utility_code(UtilityCode.load(
"SetPackagePathFromImportLib", "ImportExport.c"))
code.putln(code.error_goto_if_neg(
'__Pyx_SetPackagePathFromImportLib(%s, %s)' % (
parent_name,
code.globalstate.get_py_string_const(
EncodedString(env.module_name)).cname),
self.pos))
# CPython may not have put us into sys.modules yet, but relative imports and reimports require it
fq_module_name = self.full_module_name
if fq_module_name.endswith('.__init__'):
fq_module_name = fq_module_name[:-len('.__init__')]
code.putln("#if PY_MAJOR_VERSION >= 3")
code.putln("{")
code.putln("PyObject *modules = PyImport_GetModuleDict(); %s" %
code.error_goto_if_null("modules", self.pos))
code.putln('if (!PyDict_GetItemString(modules, "%s")) {' % fq_module_name)
code.putln(code.error_goto_if_neg('PyDict_SetItemString(modules, "%s", %s)' % (
fq_module_name, env.module_cname), self.pos))
code.putln("}")
code.putln("}")
code.putln("#endif")
def generate_module_cleanup_func(self, env, code):
if not Options.generate_cleanup_code:
return
code.putln('static void %s(CYTHON_UNUSED PyObject *self) {' %
Naming.cleanup_cname)
if Options.generate_cleanup_code >= 2:
code.putln("/*--- Global cleanup code ---*/")
rev_entries = list(env.var_entries)
rev_entries.reverse()
for entry in rev_entries:
if entry.visibility != 'extern':
if entry.type.is_pyobject and entry.used:
code.put_xdecref_clear(
entry.cname, entry.type,
clear_before_decref=True,
nanny=False)
code.putln("__Pyx_CleanupGlobals();")
if Options.generate_cleanup_code >= 3:
code.putln("/*--- Type import cleanup code ---*/")
for ext_type in sorted(env.types_imported, key=operator.attrgetter('typeptr_cname')):
code.put_xdecref_clear(
ext_type.typeptr_cname, ext_type,
clear_before_decref=True,
nanny=False)
if Options.cache_builtins:
code.putln("/*--- Builtin cleanup code ---*/")
for entry in env.cached_builtins:
code.put_xdecref_clear(
entry.cname, PyrexTypes.py_object_type,
clear_before_decref=True,
nanny=False)
code.putln("/*--- Intern cleanup code ---*/")
code.put_decref_clear(Naming.empty_tuple,
PyrexTypes.py_object_type,
clear_before_decref=True,
nanny=False)
for entry in env.c_class_entries:
cclass_type = entry.type
if cclass_type.is_external or cclass_type.base_type:
continue
if cclass_type.scope.directives.get('freelist', 0):
scope = cclass_type.scope
freelist_name = scope.mangle_internal(Naming.freelist_name)
freecount_name = scope.mangle_internal(Naming.freecount_name)
code.putln("while (%s > 0) {" % freecount_name)
code.putln("PyObject* o = (PyObject*)%s[--%s];" % (
freelist_name, freecount_name))
code.putln("(*Py_TYPE(o)->tp_free)(o);")
code.putln("}")
# for entry in env.pynum_entries:
# code.put_decref_clear(entry.cname,
# PyrexTypes.py_object_type,
# nanny=False)
# for entry in env.all_pystring_entries:
# if entry.is_interned:
# code.put_decref_clear(entry.pystring_cname,
# PyrexTypes.py_object_type,
# nanny=False)
# for entry in env.default_entries:
# if entry.type.is_pyobject and entry.used:
# code.putln("Py_DECREF(%s); %s = 0;" % (
# code.entry_as_pyobject(entry), entry.cname))
code.putln('#if CYTHON_COMPILING_IN_PYPY')
code.putln('Py_CLEAR(%s);' % Naming.builtins_cname)
code.putln('#endif')
code.put_decref_clear(env.module_dict_cname, py_object_type,
nanny=False, clear_before_decref=True)
def generate_main_method(self, env, code):
module_is_main = "%s%s" % (Naming.module_is_main, self.full_module_name.replace('.', '__'))
if Options.embed == "main":
wmain = "wmain"
else:
wmain = Options.embed
main_method = UtilityCode.load_cached("MainFunction", "Embed.c")
code.globalstate.use_utility_code(
main_method.specialize(
module_name=env.module_name,
module_is_main=module_is_main,
main_method=Options.embed,
wmain_method=wmain))
def generate_pymoduledef_struct(self, env, code):
if env.doc:
doc = "%s" % code.get_string_const(env.doc)
else:
doc = "0"
if Options.generate_cleanup_code:
cleanup_func = "(freefunc)%s" % Naming.cleanup_cname
else:
cleanup_func = 'NULL'
code.putln("")
code.putln("#if PY_MAJOR_VERSION >= 3")
code.putln("static struct PyModuleDef %s = {" % Naming.pymoduledef_cname)
code.putln("#if PY_VERSION_HEX < 0x03020000")
# fix C compiler warnings due to missing initialisers
code.putln(" { PyObject_HEAD_INIT(NULL) NULL, 0, NULL },")
code.putln("#else")
code.putln(" PyModuleDef_HEAD_INIT,")
code.putln("#endif")
code.putln(' "%s",' % env.module_name)
code.putln(" %s, /* m_doc */" % doc)
code.putln(" -1, /* m_size */")
code.putln(" %s /* m_methods */," % env.method_table_cname)
code.putln(" NULL, /* m_reload */")
code.putln(" NULL, /* m_traverse */")
code.putln(" NULL, /* m_clear */")
code.putln(" %s /* m_free */" % cleanup_func)
code.putln("};")
code.putln("#endif")
def generate_module_creation_code(self, env, code):
# Generate code to create the module object and
# install the builtins.
if env.doc:
doc = "%s" % code.get_string_const(env.doc)
else:
doc = "0"
code.putln("#if PY_MAJOR_VERSION < 3")
code.putln(
'%s = Py_InitModule4("%s", %s, %s, 0, PYTHON_API_VERSION); Py_XINCREF(%s);' % (
env.module_cname,
env.module_name,
env.method_table_cname,
doc,
env.module_cname))
code.putln("#else")
code.putln(
"%s = PyModule_Create(&%s);" % (
env.module_cname,
Naming.pymoduledef_cname))
code.putln("#endif")
code.putln(code.error_goto_if_null(env.module_cname, self.pos))
code.putln(
"%s = PyModule_GetDict(%s); %s" % (
env.module_dict_cname, env.module_cname,
code.error_goto_if_null(env.module_dict_cname, self.pos)))
code.put_incref(env.module_dict_cname, py_object_type, nanny=False)
code.putln(
'%s = PyImport_AddModule(__Pyx_BUILTIN_MODULE_NAME); %s' % (
Naming.builtins_cname,
code.error_goto_if_null(Naming.builtins_cname, self.pos)))
code.putln('#if CYTHON_COMPILING_IN_PYPY')
code.putln('Py_INCREF(%s);' % Naming.builtins_cname)
code.putln('#endif')
code.putln(
'if (PyObject_SetAttrString(%s, "__builtins__", %s) < 0) %s;' % (
env.module_cname,
Naming.builtins_cname,
code.error_goto(self.pos)))
if Options.pre_import is not None:
code.putln(
'%s = PyImport_AddModule("%s"); %s' % (
Naming.preimport_cname,
Options.pre_import,
code.error_goto_if_null(Naming.preimport_cname, self.pos)))
def generate_global_init_code(self, env, code):
# Generate code to initialise global PyObject *
# variables to None.
for entry in env.var_entries:
if entry.visibility != 'extern':
if entry.used:
entry.type.global_init_code(entry, code)
def generate_wrapped_entries_code(self, env, code):
for name, entry in sorted(env.entries.items()):
if (entry.create_wrapper
and not entry.is_type
and entry.scope is env):
if not entry.type.create_to_py_utility_code(env):
error(entry.pos, "Cannot convert '%s' to Python object" % entry.type)
code.putln("{")
code.putln("PyObject* wrapped = %s(%s);" % (
entry.type.to_py_function,
entry.cname))
code.putln(code.error_goto_if_null("wrapped", entry.pos))
code.putln(
'if (PyObject_SetAttrString(%s, "%s", wrapped) < 0) %s;' % (
env.module_cname,
name,
code.error_goto(entry.pos)))
code.putln("}")
def generate_c_variable_export_code(self, env, code):
# Generate code to export C variables via void pointers.
entries = []
for entry in env.var_entries:
if (entry.api
or entry.defined_in_pxd
or (Options.cimport_from_pyx and not entry.visibility == 'extern')):
entries.append(entry)
if entries:
env.use_utility_code(UtilityCode.load_cached("VoidPtrExport", "ImportExport.c"))
for entry in entries:
signature = entry.type.empty_declaration_code()
name = code.intern_identifier(entry.name)
code.putln('if (__Pyx_ExportVoidPtr(%s, (void *)&%s, "%s") < 0) %s' % (
name, entry.cname, signature,
code.error_goto(self.pos)))
def generate_c_function_export_code(self, env, code):
# Generate code to create PyCFunction wrappers for exported C functions.
entries = []
for entry in env.cfunc_entries:
if (entry.api
or entry.defined_in_pxd
or (Options.cimport_from_pyx and not entry.visibility == 'extern')):
entries.append(entry)
if entries:
env.use_utility_code(
UtilityCode.load_cached("FunctionExport", "ImportExport.c"))
for entry in entries:
signature = entry.type.signature_string()
code.putln('if (__Pyx_ExportFunction("%s", (void (*)(void))%s, "%s") < 0) %s' % (
entry.name,
entry.cname,
signature,
code.error_goto(self.pos)))
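# __Pyx_ExportFunction publishes each function pointer as a capsule in the
# module's "__pyx_capi__" dict, keyed by name and checked against the
# signature string by the importing module's __Pyx_ImportFunction.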
def generate_type_import_code_for_module(self, module, env, code):
# Generate type import code for all exported extension types in
# an imported module.
#if module.c_class_entries:
for entry in module.c_class_entries:
if entry.defined_in_pxd:
self.generate_type_import_code(env, entry.type, entry.pos, code)
def specialize_fused_types(self, pxd_env):
"""
If fused c(p)def functions are defined in an imported pxd, but not
used in this implementation file, we still have fused entries and
not specialized ones. This method replaces any fused entries with their
specialized ones.
"""
for entry in pxd_env.cfunc_entries[:]:
if entry.type.is_fused:
# This call modifies the cfunc_entries in-place
entry.type.get_all_specialized_function_types()
def generate_c_variable_import_code_for_module(self, module, env, code):
# Generate import code for all exported C variables in a cimported module.
entries = []
for entry in module.var_entries:
if entry.defined_in_pxd:
entries.append(entry)
if entries:
env.use_utility_code(
UtilityCode.load_cached("ModuleImport", "ImportExport.c"))
env.use_utility_code(
UtilityCode.load_cached("VoidPtrImport", "ImportExport.c"))
temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
code.putln(
'%s = __Pyx_ImportModule("%s"); if (!%s) %s' % (
temp,
module.qualified_name,
temp,
code.error_goto(self.pos)))
for entry in entries:
if env is module:
cname = entry.cname
else:
cname = module.mangle(Naming.varptr_prefix, entry.name)
signature = entry.type.empty_declaration_code()
code.putln(
'if (__Pyx_ImportVoidPtr(%s, "%s", (void **)&%s, "%s") < 0) %s' % (
temp, entry.name, cname, signature,
code.error_goto(self.pos)))
code.putln("Py_DECREF(%s); %s = 0;" % (temp, temp))
def generate_c_function_import_code_for_module(self, module, env, code):
# Generate import code for all exported C functions in a cimported module.
entries = []
for entry in module.cfunc_entries:
if entry.defined_in_pxd and entry.used:
entries.append(entry)
if entries:
env.use_utility_code(
UtilityCode.load_cached("ModuleImport", "ImportExport.c"))
env.use_utility_code(
UtilityCode.load_cached("FunctionImport", "ImportExport.c"))
temp = code.funcstate.allocate_temp(py_object_type, manage_ref=True)
code.putln(
'%s = __Pyx_ImportModule("%s"); if (!%s) %s' % (
temp,
module.qualified_name,
temp,
code.error_goto(self.pos)))
for entry in entries:
code.putln(
'if (__Pyx_ImportFunction(%s, "%s", (void (**)(void))&%s, "%s") < 0) %s' % (
temp,
entry.name,
entry.cname,
entry.type.signature_string(),
code.error_goto(self.pos)))
code.putln("Py_DECREF(%s); %s = 0;" % (temp, temp))
def generate_type_init_code(self, env, code):
# Generate type import code for extern extension types
# and type ready code for non-extern ones.
for entry in env.c_class_entries:
if entry.visibility == 'extern' and not entry.utility_code_definition:
self.generate_type_import_code(env, entry.type, entry.pos, code)
else:
self.generate_base_type_import_code(env, entry, code)
self.generate_exttype_vtable_init_code(entry, code)
self.generate_type_ready_code(env, entry, code)
self.generate_typeptr_assignment_code(entry, code)
def generate_base_type_import_code(self, env, entry, code):
base_type = entry.type.base_type
if (base_type and base_type.module_name != env.qualified_name and not
base_type.is_builtin_type and not entry.utility_code_definition):
self.generate_type_import_code(env, base_type, self.pos, code)
def generate_type_import_code(self, env, type, pos, code):
# If not already done, generate code to import the typeobject of an
# extension type defined in another module, and extract its C method
# table pointer if any.
if type in env.types_imported:
return
env.use_utility_code(UtilityCode.load_cached("TypeImport", "ImportExport.c"))
self.generate_type_import_call(type, code,
code.error_goto_if_null(type.typeptr_cname, pos))
if type.vtabptr_cname:
code.globalstate.use_utility_code(
UtilityCode.load_cached('GetVTable', 'ImportExport.c'))
code.putln("%s = (struct %s*)__Pyx_GetVtable(%s->tp_dict); %s" % (
type.vtabptr_cname,
type.vtabstruct_cname,
type.typeptr_cname,
code.error_goto_if_null(type.vtabptr_cname, pos)))
env.types_imported.add(type)
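# Py2 builtin type names mapped to the names those types carry under Py3
# (Py2 'str' is Py3 'bytes', Py2 'unicode' is Py3 'str').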
py3_type_name_map = {'str' : 'bytes', 'unicode' : 'str'}
def generate_type_import_call(self, type, code, error_code):
if type.typedef_flag:
objstruct = type.objstruct_cname
else:
objstruct = "struct %s" % type.objstruct_cname
sizeof_objstruct = objstruct
module_name = type.module_name
condition = replacement = None
if module_name not in ('__builtin__', 'builtins'):
module_name = '"%s"' % module_name
else:
module_name = '__Pyx_BUILTIN_MODULE_NAME'
if type.name in Code.non_portable_builtins_map:
condition, replacement = Code.non_portable_builtins_map[type.name]
if objstruct in Code.basicsize_builtins_map:
# Some builtin types have a tp_basicsize which differs from sizeof(...):
sizeof_objstruct = Code.basicsize_builtins_map[objstruct]
code.put('%s = __Pyx_ImportType(%s,' % (
type.typeptr_cname,
module_name))
if condition and replacement:
code.putln("") # start in new line
code.putln("#if %s" % condition)
code.putln('"%s",' % replacement)
code.putln("#else")
code.putln('"%s",' % type.name)
code.putln("#endif")
else:
code.put(' "%s", ' % type.name)
if sizeof_objstruct != objstruct:
if not condition:
code.putln("") # start in new line
code.putln("#if CYTHON_COMPILING_IN_PYPY")
code.putln('sizeof(%s),' % objstruct)
code.putln("#else")
code.putln('sizeof(%s),' % sizeof_objstruct)
code.putln("#endif")
else:
code.put('sizeof(%s), ' % objstruct)
code.putln('%i); %s' % (
not type.is_external or type.is_subclassed,
error_code))
def generate_type_ready_code(self, env, entry, code):
# Generate a call to PyType_Ready for an extension
# type defined in this module.
type = entry.type
typeobj_cname = type.typeobj_cname
scope = type.scope
if scope: # could be None if there was an error
if entry.visibility != 'extern':
for slot in TypeSlots.slot_table:
slot.generate_dynamic_init_code(scope, code)
code.putln(
"if (PyType_Ready(&%s) < 0) %s" % (
typeobj_cname,
code.error_goto(entry.pos)))
# Don't inherit tp_print from builtin types, restoring the
# behavior of using tp_repr or tp_str instead.
code.putln("%s.tp_print = 0;" % typeobj_cname)
# Fix special method docstrings. This is a bit of a hack, but
# unless we let PyType_Ready create the slot wrappers we have
# a significant performance hit. (See trac #561.)
for func in entry.type.scope.pyfunc_entries:
is_buffer = func.name in ('__getbuffer__', '__releasebuffer__')
if (func.is_special and Options.docstrings and
func.wrapperbase_cname and not is_buffer):
slot = TypeSlots.method_name_to_slot[func.name]
preprocessor_guard = slot.preprocessor_guard_code()
if preprocessor_guard:
code.putln(preprocessor_guard)
code.putln('#if CYTHON_COMPILING_IN_CPYTHON')
code.putln("{")
code.putln(
'PyObject *wrapper = PyObject_GetAttrString((PyObject *)&%s, "%s"); %s' % (
typeobj_cname,
func.name,
code.error_goto_if_null('wrapper', entry.pos)))
code.putln(
"if (Py_TYPE(wrapper) == &PyWrapperDescr_Type) {")
code.putln(
"%s = *((PyWrapperDescrObject *)wrapper)->d_base;" % (
func.wrapperbase_cname))
code.putln(
"%s.doc = %s;" % (func.wrapperbase_cname, func.doc_cname))
code.putln(
"((PyWrapperDescrObject *)wrapper)->d_base = &%s;" % (
func.wrapperbase_cname))
code.putln("}")
code.putln("}")
code.putln('#endif')
if preprocessor_guard:
code.putln('#endif')
if type.vtable_cname:
code.putln(
"if (__Pyx_SetVtable(%s.tp_dict, %s) < 0) %s" % (
typeobj_cname,
type.vtabptr_cname,
code.error_goto(entry.pos)))
code.globalstate.use_utility_code(
UtilityCode.load_cached('SetVTable', 'ImportExport.c'))
if not type.scope.is_internal and not type.scope.directives['internal']:
# scope.is_internal is set for types defined by
# Cython (such as closures), while the 'internal'
# directive is set by users
code.putln(
'if (PyObject_SetAttrString(%s, "%s", (PyObject *)&%s) < 0) %s' % (
Naming.module_cname,
scope.class_name,
typeobj_cname,
code.error_goto(entry.pos)))
weakref_entry = scope.lookup_here("__weakref__")
if weakref_entry:
if weakref_entry.type is py_object_type:
tp_weaklistoffset = "%s.tp_weaklistoffset" % typeobj_cname
if type.typedef_flag:
objstruct = type.objstruct_cname
else:
objstruct = "struct %s" % type.objstruct_cname
code.putln("if (%s == 0) %s = offsetof(%s, %s);" % (
tp_weaklistoffset,
tp_weaklistoffset,
objstruct,
weakref_entry.cname))
else:
error(weakref_entry.pos, "__weakref__ slot must be of type 'object'")
def generate_exttype_vtable_init_code(self, entry, code):
# Generate code to initialise the C method table of an
# extension type.
type = entry.type
if type.vtable_cname:
code.putln(
"%s = &%s;" % (
type.vtabptr_cname,
type.vtable_cname))
if type.base_type and type.base_type.vtabptr_cname:
code.putln(
"%s.%s = *%s;" % (
type.vtable_cname,
Naming.obj_base_cname,
type.base_type.vtabptr_cname))
c_method_entries = [
entry for entry in type.scope.cfunc_entries
if entry.func_cname]
if c_method_entries:
for meth_entry in c_method_entries:
cast = meth_entry.type.signature_cast_string()
code.putln(
"%s.%s = %s%s;" % (
type.vtable_cname,
meth_entry.cname,
cast,
meth_entry.func_cname))
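# Illustrative C output for a subclass vtable (hypothetical names; the
# cast comes from the method's signature_cast_string):
#
#   __pyx_vtabptr_3mod_Sub = &__pyx_vtable_3mod_Sub;
#   __pyx_vtable_3mod_Sub.__pyx_base = *__pyx_vtabptr_3mod_Base;
#   __pyx_vtable_3mod_Sub.meth = (<signature cast>)__pyx_f_3mod_3Sub_meth;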
def generate_typeptr_assignment_code(self, entry, code):
# Generate code to initialise the typeptr of an extension
# type defined in this module to point to its type object.
type = entry.type
if type.typeobj_cname:
code.putln(
"%s = &%s;" % (
type.typeptr_cname, type.typeobj_cname))
def generate_cfunction_declaration(entry, env, code, definition):
from_cy_utility = entry.used and entry.utility_code_definition
if entry.used and entry.inline_func_in_pxd or (not entry.in_cinclude and (
definition or entry.defined_in_pxd or entry.visibility == 'extern' or from_cy_utility)):
if entry.visibility == 'extern':
storage_class = Naming.extern_c_macro
dll_linkage = "DL_IMPORT"
elif entry.visibility == 'public':
storage_class = Naming.extern_c_macro
dll_linkage = "DL_EXPORT"
elif entry.visibility == 'private':
storage_class = "static"
dll_linkage = None
else:
storage_class = "static"
dll_linkage = None
type = entry.type
if entry.defined_in_pxd and not definition:
storage_class = "static"
dll_linkage = None
type = CPtrType(type)
header = type.declaration_code(
entry.cname, dll_linkage=dll_linkage)
modifiers = code.build_function_modifiers(entry.func_modifiers)
code.putln("%s %s%s; /*proto*/" % (
storage_class,
modifiers,
header))
#------------------------------------------------------------------------------------
#
# Runtime support code
#
#------------------------------------------------------------------------------------
refnanny_utility_code = UtilityCode.load("Refnanny", "ModuleSetupCode.c")
packed_struct_utility_code = UtilityCode(proto="""
#if defined(__GNUC__)
#define __Pyx_PACKED __attribute__((__packed__))
#else
#define __Pyx_PACKED
#endif
""", impl="", proto_block='utility_code_proto_before_types')
capsule_utility_code = UtilityCode.load("Capsule")
| 43.551206
| 120
| 0.566172
|
73535c513e3205d95f1d2f9517520e396ac1b2bc
| 452
|
py
|
Python
|
example/demo_context.py
|
oggthemiffed/Flask-Spring
|
aad68df561fd6b759a03c8ab118d4b0619ce0cec
|
[
"MIT"
] | 1
|
2021-12-11T07:49:45.000Z
|
2021-12-11T07:49:45.000Z
|
example/demo_context.py
|
oggthemiffed/Flask-Spring
|
aad68df561fd6b759a03c8ab118d4b0619ce0cec
|
[
"MIT"
] | null | null | null |
example/demo_context.py
|
oggthemiffed/Flask-Spring
|
aad68df561fd6b759a03c8ab118d4b0619ce0cec
|
[
"MIT"
] | 1
|
2017-05-21T13:51:57.000Z
|
2017-05-21T13:51:57.000Z
|
from springpython.config import PythonConfig, Object
from springpython.context import scope
from objects.test_obj import TestObject
__author__ = 'david'
class DemoApplicationContext(PythonConfig):
def __init__(self):
super (DemoApplicationContext, self).__init__()
@Object(scope.SINGLETON, lazy_init=True)
def TestObject(self):
to = TestObject()
to.message = 'Message set by the object context'
return to
| 28.25
| 56
| 0.730088
|
d0e7d8f3cfa3df2f8c0ef82de28d40205589d9cf
| 26,781
|
py
|
Python
|
lib/sqlalchemy/testing/fixtures.py
|
sqlalchemy-bot/sqlalchemy
|
c0736e0b2a3bf8c0952db84f5b9943df9ebf18f7
|
[
"MIT"
] | null | null | null |
lib/sqlalchemy/testing/fixtures.py
|
sqlalchemy-bot/sqlalchemy
|
c0736e0b2a3bf8c0952db84f5b9943df9ebf18f7
|
[
"MIT"
] | null | null | null |
lib/sqlalchemy/testing/fixtures.py
|
sqlalchemy-bot/sqlalchemy
|
c0736e0b2a3bf8c0952db84f5b9943df9ebf18f7
|
[
"MIT"
] | null | null | null |
# testing/fixtures.py
# Copyright (C) 2005-2022 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
from __future__ import annotations
import re
import sys
import sqlalchemy as sa
from . import assertions
from . import config
from . import schema
from .entities import BasicEntity
from .entities import ComparableEntity
from .entities import ComparableMixin # noqa
from .util import adict
from .util import drop_all_tables_from_metadata
from .. import event
from .. import util
from ..orm import declarative_base
from ..orm import DeclarativeBase
from ..orm import MappedAsDataclass
from ..orm import registry
from ..schema import sort_tables_and_constraints
@config.mark_base_test_class()
class TestBase:
# A sequence of requirement names matching testing.requires decorators
__requires__ = ()
# A sequence of dialect names to exclude from the test class.
__unsupported_on__ = ()
# If present, test class is only runnable for the *single* specified
# dialect. If you need multiple, use __unsupported_on__ and invert.
__only_on__ = None
# A sequence of no-arg callables. If any returns True, the entire test
# case is skipped.
__skip_if__ = None
# if True, the testing reaper will not attempt to touch connection
# state after a test is completed and before the outer teardown
# starts
__leave_connections_for_teardown__ = False
def assert_(self, val, msg=None):
assert val, msg
@config.fixture()
def nocache(self):
_cache = config.db._compiled_cache
config.db._compiled_cache = None
yield
config.db._compiled_cache = _cache
@config.fixture()
def connection_no_trans(self):
eng = getattr(self, "bind", None) or config.db
with eng.connect() as conn:
yield conn
@config.fixture()
def connection(self):
global _connection_fixture_connection
eng = getattr(self, "bind", None) or config.db
conn = eng.connect()
trans = conn.begin()
_connection_fixture_connection = conn
yield conn
_connection_fixture_connection = None
if trans.is_active:
trans.rollback()
# trans would not be active here if the test is still using
# the legacy @provide_metadata decorator, as that runs a
# close-all-connections step.
conn.close()
@config.fixture()
def close_result_when_finished(self):
to_close = []
def go(result):
to_close.append(result)
yield go
for r in to_close:
try:
r.close()
except:
pass
@config.fixture()
def registry(self, metadata):
reg = registry(
metadata=metadata,
type_annotation_map={
str: sa.String().with_variant(
sa.String(50), "mysql", "mariadb"
)
},
)
yield reg
reg.dispose()
@config.fixture
def decl_base(self, metadata):
_md = metadata
class Base(DeclarativeBase):
metadata = _md
type_annotation_map = {
str: sa.String().with_variant(
sa.String(50), "mysql", "mariadb"
)
}
yield Base
Base.registry.dispose()
@config.fixture
def dc_decl_base(self, metadata):
_md = metadata
class Base(MappedAsDataclass, DeclarativeBase):
metadata = _md
type_annotation_map = {
str: sa.String().with_variant(
sa.String(50), "mysql", "mariadb"
)
}
yield Base
Base.registry.dispose()
@config.fixture()
def future_connection(self, future_engine, connection):
# integrate the future_engine and connection fixtures so
# that users of the "connection" fixture will get at the
# "future" connection
yield connection
@config.fixture()
def future_engine(self):
yield
@config.fixture()
def testing_engine(self):
from . import engines
def gen_testing_engine(
url=None,
options=None,
future=None,
asyncio=False,
transfer_staticpool=False,
share_pool=False,
):
if options is None:
options = {}
options["scope"] = "fixture"
return engines.testing_engine(
url=url,
options=options,
asyncio=asyncio,
transfer_staticpool=transfer_staticpool,
share_pool=share_pool,
)
yield gen_testing_engine
engines.testing_reaper._drop_testing_engines("fixture")
@config.fixture()
def async_testing_engine(self, testing_engine):
def go(**kw):
kw["asyncio"] = True
return testing_engine(**kw)
return go
@config.fixture
def fixture_session(self):
return fixture_session()
@config.fixture()
def metadata(self, request):
"""Provide bound MetaData for a single test, dropping afterwards."""
from ..sql import schema
metadata = schema.MetaData()
request.instance.metadata = metadata
yield metadata
del request.instance.metadata
if (
_connection_fixture_connection
and _connection_fixture_connection.in_transaction()
):
trans = _connection_fixture_connection.get_transaction()
trans.rollback()
with _connection_fixture_connection.begin():
drop_all_tables_from_metadata(
metadata, _connection_fixture_connection
)
else:
drop_all_tables_from_metadata(metadata, config.db)
@config.fixture(
params=[
(rollback, second_operation, begin_nested)
for rollback in (True, False)
for second_operation in ("none", "execute", "begin")
for begin_nested in (
True,
False,
)
]
)
def trans_ctx_manager_fixture(self, request, metadata):
rollback, second_operation, begin_nested = request.param
from sqlalchemy import Table, Column, Integer, func, select
from . import eq_
t = Table("test", metadata, Column("data", Integer))
eng = getattr(self, "bind", None) or config.db
t.create(eng)
def run_test(subject, trans_on_subject, execute_on_subject):
with subject.begin() as trans:
if begin_nested:
if not config.requirements.savepoints.enabled:
config.skip_test("savepoints not enabled")
if execute_on_subject:
nested_trans = subject.begin_nested()
else:
nested_trans = trans.begin_nested()
with nested_trans:
if execute_on_subject:
subject.execute(t.insert(), {"data": 10})
else:
trans.execute(t.insert(), {"data": 10})
# For a nested transaction, we always commit/rollback on the
# "nested trans" object itself; only Session(future=False)
# lets session.commit/rollback affect the savepoint
# transaction.
if rollback:
nested_trans.rollback()
else:
nested_trans.commit()
if second_operation != "none":
with assertions.expect_raises_message(
sa.exc.InvalidRequestError,
"Can't operate on closed transaction "
"inside context "
"manager. Please complete the context "
"manager "
"before emitting further commands.",
):
if second_operation == "execute":
if execute_on_subject:
subject.execute(
t.insert(), {"data": 12}
)
else:
trans.execute(t.insert(), {"data": 12})
elif second_operation == "begin":
if execute_on_subject:
subject.begin_nested()
else:
trans.begin_nested()
# outside the nested trans block, but still inside the
# transaction block, we can run SQL, and it will be
# committed
if execute_on_subject:
subject.execute(t.insert(), {"data": 14})
else:
trans.execute(t.insert(), {"data": 14})
else:
if execute_on_subject:
subject.execute(t.insert(), {"data": 10})
else:
trans.execute(t.insert(), {"data": 10})
if trans_on_subject:
if rollback:
subject.rollback()
else:
subject.commit()
else:
if rollback:
trans.rollback()
else:
trans.commit()
if second_operation != "none":
with assertions.expect_raises_message(
sa.exc.InvalidRequestError,
"Can't operate on closed transaction inside "
"context "
"manager. Please complete the context manager "
"before emitting further commands.",
):
if second_operation == "execute":
if execute_on_subject:
subject.execute(t.insert(), {"data": 12})
else:
trans.execute(t.insert(), {"data": 12})
elif second_operation == "begin":
if hasattr(trans, "begin"):
trans.begin()
else:
subject.begin()
elif second_operation == "begin_nested":
if execute_on_subject:
subject.begin_nested()
else:
trans.begin_nested()
expected_committed = 0
if begin_nested:
# In the begin_nested variant, a row was inserted after the
# nested block
expected_committed += 1
if not rollback:
# In the non-rollback variant, the row inserted in the target
# block itself is committed
expected_committed += 1
if execute_on_subject:
eq_(
subject.scalar(select(func.count()).select_from(t)),
expected_committed,
)
else:
with subject.connect() as conn:
eq_(
conn.scalar(select(func.count()).select_from(t)),
expected_committed,
)
return run_test
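# Sketch of how a test might consume this fixture (test name and subject
# are hypothetical; the subject can be an Engine, Connection or Session):
#
#   def test_engine_ctx(self, trans_ctx_manager_fixture):
#       run_test = trans_ctx_manager_fixture
#       run_test(config.db, trans_on_subject=False, execute_on_subject=False)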
_connection_fixture_connection = None
class FutureEngineMixin:
"""alembic's suite still using this"""
class TablesTest(TestBase):
# 'once', None
run_setup_bind = "once"
# 'once', 'each', None
run_define_tables = "once"
# 'once', 'each', None
run_create_tables = "once"
# 'once', 'each', None
run_inserts = "each"
# 'each', None
run_deletes = "each"
# 'once', None
run_dispose_bind = None
bind = None
_tables_metadata = None
tables = None
other = None
sequences = None
@config.fixture(autouse=True, scope="class")
def _setup_tables_test_class(self):
cls = self.__class__
cls._init_class()
cls._setup_once_tables()
cls._setup_once_inserts()
yield
cls._teardown_once_metadata_bind()
@config.fixture(autouse=True, scope="function")
def _setup_tables_test_instance(self):
self._setup_each_tables()
self._setup_each_inserts()
yield
self._teardown_each_tables()
@property
def tables_test_metadata(self):
return self._tables_metadata
@classmethod
def _init_class(cls):
if cls.run_define_tables == "each":
if cls.run_create_tables == "once":
cls.run_create_tables = "each"
assert cls.run_inserts in ("each", None)
cls.other = adict()
cls.tables = adict()
cls.sequences = adict()
cls.bind = cls.setup_bind()
cls._tables_metadata = sa.MetaData()
@classmethod
def _setup_once_inserts(cls):
if cls.run_inserts == "once":
cls._load_fixtures()
with cls.bind.begin() as conn:
cls.insert_data(conn)
@classmethod
def _setup_once_tables(cls):
if cls.run_define_tables == "once":
cls.define_tables(cls._tables_metadata)
if cls.run_create_tables == "once":
cls._tables_metadata.create_all(cls.bind)
cls.tables.update(cls._tables_metadata.tables)
cls.sequences.update(cls._tables_metadata._sequences)
def _setup_each_tables(self):
if self.run_define_tables == "each":
self.define_tables(self._tables_metadata)
if self.run_create_tables == "each":
self._tables_metadata.create_all(self.bind)
self.tables.update(self._tables_metadata.tables)
self.sequences.update(self._tables_metadata._sequences)
elif self.run_create_tables == "each":
self._tables_metadata.create_all(self.bind)
def _setup_each_inserts(self):
if self.run_inserts == "each":
self._load_fixtures()
with self.bind.begin() as conn:
self.insert_data(conn)
def _teardown_each_tables(self):
if self.run_define_tables == "each":
self.tables.clear()
if self.run_create_tables == "each":
drop_all_tables_from_metadata(self._tables_metadata, self.bind)
self._tables_metadata.clear()
elif self.run_create_tables == "each":
drop_all_tables_from_metadata(self._tables_metadata, self.bind)
savepoints = getattr(config.requirements, "savepoints", False)
if savepoints:
savepoints = savepoints.enabled
# no need to run deletes if tables are recreated on setup
if (
self.run_define_tables != "each"
and self.run_create_tables != "each"
and self.run_deletes == "each"
):
with self.bind.begin() as conn:
for table in reversed(
[
t
for (t, fks) in sort_tables_and_constraints(
self._tables_metadata.tables.values()
)
if t is not None
]
):
try:
if savepoints:
with conn.begin_nested():
conn.execute(table.delete())
else:
conn.execute(table.delete())
except sa.exc.DBAPIError as ex:
print(
("Error emptying table %s: %r" % (table, ex)),
file=sys.stderr,
)
@classmethod
def _teardown_once_metadata_bind(cls):
if cls.run_create_tables:
drop_all_tables_from_metadata(cls._tables_metadata, cls.bind)
if cls.run_dispose_bind == "once":
cls.dispose_bind(cls.bind)
cls._tables_metadata.bind = None
if cls.run_setup_bind is not None:
cls.bind = None
@classmethod
def setup_bind(cls):
return config.db
@classmethod
def dispose_bind(cls, bind):
if hasattr(bind, "dispose"):
bind.dispose()
elif hasattr(bind, "close"):
bind.close()
@classmethod
def define_tables(cls, metadata):
pass
@classmethod
def fixtures(cls):
return {}
@classmethod
def insert_data(cls, connection):
pass
def sql_count_(self, count, fn):
self.assert_sql_count(self.bind, fn, count)
def sql_eq_(self, callable_, statements):
self.assert_sql(self.bind, callable_, statements)
@classmethod
def _load_fixtures(cls):
"""Insert rows as represented by the fixtures() method."""
headers, rows = {}, {}
for table, data in cls.fixtures().items():
if len(data) < 2:
continue
if isinstance(table, str):
table = cls.tables[table]
headers[table] = data[0]
rows[table] = data[1:]
for table, fks in sort_tables_and_constraints(
cls._tables_metadata.tables.values()
):
if table is None:
continue
if table not in headers:
continue
with cls.bind.begin() as conn:
conn.execute(
table.insert(),
[
dict(zip(headers[table], column_values))
for column_values in rows[table]
],
)
class NoCache:
@config.fixture(autouse=True, scope="function")
def _disable_cache(self):
_cache = config.db._compiled_cache
config.db._compiled_cache = None
yield
config.db._compiled_cache = _cache
class RemovesEvents:
@util.memoized_property
def _event_fns(self):
return set()
def event_listen(self, target, name, fn, **kw):
self._event_fns.add((target, name, fn))
event.listen(target, name, fn, **kw)
@config.fixture(autouse=True, scope="function")
def _remove_events(self):
yield
for key in self._event_fns:
event.remove(*key)
_fixture_sessions = set()
def fixture_session(**kw):
kw.setdefault("autoflush", True)
kw.setdefault("expire_on_commit", True)
bind = kw.pop("bind", config.db)
sess = sa.orm.Session(bind, **kw)
_fixture_sessions.add(sess)
return sess
def _close_all_sessions():
# will close all still-referenced sessions
sa.orm.session.close_all_sessions()
_fixture_sessions.clear()
def stop_test_class_inside_fixtures(cls):
_close_all_sessions()
sa.orm.clear_mappers()
def after_test():
if _fixture_sessions:
_close_all_sessions()
class ORMTest(TestBase):
pass
class MappedTest(TablesTest, assertions.AssertsExecutionResults):
# 'once', 'each', None
run_setup_classes = "once"
# 'once', 'each', None
run_setup_mappers = "each"
classes = None
@config.fixture(autouse=True, scope="class")
def _setup_tables_test_class(self):
cls = self.__class__
cls._init_class()
if cls.classes is None:
cls.classes = adict()
cls._setup_once_tables()
cls._setup_once_classes()
cls._setup_once_mappers()
cls._setup_once_inserts()
yield
cls._teardown_once_class()
cls._teardown_once_metadata_bind()
@config.fixture(autouse=True, scope="function")
def _setup_tables_test_instance(self):
self._setup_each_tables()
self._setup_each_classes()
self._setup_each_mappers()
self._setup_each_inserts()
yield
sa.orm.session.close_all_sessions()
self._teardown_each_mappers()
self._teardown_each_classes()
self._teardown_each_tables()
@classmethod
def _teardown_once_class(cls):
cls.classes.clear()
@classmethod
def _setup_once_classes(cls):
if cls.run_setup_classes == "once":
cls._with_register_classes(cls.setup_classes)
@classmethod
def _setup_once_mappers(cls):
if cls.run_setup_mappers == "once":
cls.mapper_registry, cls.mapper = cls._generate_registry()
cls._with_register_classes(cls.setup_mappers)
def _setup_each_mappers(self):
if self.run_setup_mappers != "once":
(
self.__class__.mapper_registry,
self.__class__.mapper,
) = self._generate_registry()
if self.run_setup_mappers == "each":
self._with_register_classes(self.setup_mappers)
def _setup_each_classes(self):
if self.run_setup_classes == "each":
self._with_register_classes(self.setup_classes)
@classmethod
def _generate_registry(cls):
decl = registry(metadata=cls._tables_metadata)
return decl, decl.map_imperatively
@classmethod
def _with_register_classes(cls, fn):
"""Run a setup method, framing the operation with a Base class
that will catch new subclasses to be established within
the "classes" registry.
"""
cls_registry = cls.classes
class _Base:
def __init_subclass__(cls) -> None:
assert cls_registry is not None
cls_registry[cls.__name__] = cls
super().__init_subclass__()
class Basic(BasicEntity, _Base):
pass
class Comparable(ComparableEntity, _Base):
pass
cls.Basic = Basic
cls.Comparable = Comparable
fn()
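# e.g. a setup_classes() that defines "class User(cls.Comparable): ..."
# makes the class available afterwards as cls.classes.User (the "classes"
# adict supports attribute access) -- example names hypothetical.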
def _teardown_each_mappers(self):
# some tests create mappers in the test bodies
# and will define setup_mappers as None -
# clear mappers in any case
if self.run_setup_mappers != "once":
sa.orm.clear_mappers()
def _teardown_each_classes(self):
if self.run_setup_classes != "once":
self.classes.clear()
@classmethod
def setup_classes(cls):
pass
@classmethod
def setup_mappers(cls):
pass
class DeclarativeMappedTest(MappedTest):
run_setup_classes = "once"
run_setup_mappers = "once"
@classmethod
def _setup_once_tables(cls):
pass
@classmethod
def _with_register_classes(cls, fn):
cls_registry = cls.classes
class DeclarativeBasic:
__table_cls__ = schema.Table
def __init_subclass__(cls) -> None:
assert cls_registry is not None
cls_registry[cls.__name__] = cls
super().__init_subclass__()
_DeclBase = declarative_base(
metadata=cls._tables_metadata,
cls=DeclarativeBasic,
)
cls.DeclarativeBasic = _DeclBase
# sets up cls.Basic which is helpful for things like composite
# classes
super(DeclarativeMappedTest, cls)._with_register_classes(fn)
if cls._tables_metadata.tables and cls.run_create_tables:
cls._tables_metadata.create_all(config.db)
class ComputedReflectionFixtureTest(TablesTest):
run_inserts = run_deletes = None
__backend__ = True
__requires__ = ("computed_columns", "table_reflection")
regexp = re.compile(r"[\[\]\(\)\s`'\"]*")
def normalize(self, text):
return self.regexp.sub("", text).lower()
@classmethod
def define_tables(cls, metadata):
from .. import Integer
from .. import testing
from ..schema import Column
from ..schema import Computed
from ..schema import Table
Table(
"computed_default_table",
metadata,
Column("id", Integer, primary_key=True),
Column("normal", Integer),
Column("computed_col", Integer, Computed("normal + 42")),
Column("with_default", Integer, server_default="42"),
)
t = Table(
"computed_column_table",
metadata,
Column("id", Integer, primary_key=True),
Column("normal", Integer),
Column("computed_no_flag", Integer, Computed("normal + 42")),
)
if testing.requires.schemas.enabled:
t2 = Table(
"computed_column_table",
metadata,
Column("id", Integer, primary_key=True),
Column("normal", Integer),
Column("computed_no_flag", Integer, Computed("normal / 42")),
schema=config.test_schema,
)
if testing.requires.computed_columns_virtual.enabled:
t.append_column(
Column(
"computed_virtual",
Integer,
Computed("normal + 2", persisted=False),
)
)
if testing.requires.schemas.enabled:
t2.append_column(
Column(
"computed_virtual",
Integer,
Computed("normal / 2", persisted=False),
)
)
if testing.requires.computed_columns_stored.enabled:
t.append_column(
Column(
"computed_stored",
Integer,
Computed("normal - 42", persisted=True),
)
)
if testing.requires.schemas.enabled:
t2.append_column(
Column(
"computed_stored",
Integer,
Computed("normal * 42", persisted=True),
)
)
| 30.571918
| 79
| 0.540682
|
1338c4976b5d10cb9c4a852d735e6db48a00dac4
| 768
|
py
|
Python
|
src/rics/translation/dio/exceptions.py
|
rsundqvist/rics
|
c67ff6703facb3170535dcf173d7e55734cedbc6
|
[
"MIT"
] | 1
|
2022-02-24T22:12:13.000Z
|
2022-02-24T22:12:13.000Z
|
src/rics/translation/dio/exceptions.py
|
rsundqvist/rics
|
c67ff6703facb3170535dcf173d7e55734cedbc6
|
[
"MIT"
] | 26
|
2022-02-24T21:08:51.000Z
|
2022-03-19T19:55:26.000Z
|
src/rics/translation/dio/exceptions.py
|
rsundqvist/rics
|
c67ff6703facb3170535dcf173d7e55734cedbc6
|
[
"MIT"
] | null | null | null |
"""Data structure I exceptions."""
from typing import Any, Type
class DataStructureIOError(RuntimeError):
"""Base class for translator exceptions."""
class UntranslatableTypeError(DataStructureIOError):
"""Exception indicating that a type cannot be translated.
Args:
t: A type.
"""
def __init__(self, t: Type[Any]) -> None:
super().__init__(f"Type {t} cannot be translated.")
class NotInplaceTranslatableError(DataStructureIOError):
"""Exception indicating that a type cannot be translated in-place.
Args:
arg: Something that can't be translated in-place.
"""
def __init__(self, arg: Any) -> None:
super().__init__(f"Inplace translation not possible or implemented for type: {type(arg)}")
| 25.6
| 98
| 0.686198
|