Dataset schema (29 columns, one row per source file; ⌀ marks a nullable column):

| column | dtype | range |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 – 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4 – 209 |
| max_stars_repo_name | string | length 5 – 121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 – 10 |
| max_stars_count | int64 ⌀ | 1 – 191k |
| max_stars_repo_stars_event_min_datetime | string ⌀ | length 24 |
| max_stars_repo_stars_event_max_datetime | string ⌀ | length 24 |
| max_issues_repo_path | string | length 4 – 209 |
| max_issues_repo_name | string | length 5 – 121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 – 10 |
| max_issues_count | int64 ⌀ | 1 – 67k |
| max_issues_repo_issues_event_min_datetime | string ⌀ | length 24 |
| max_issues_repo_issues_event_max_datetime | string ⌀ | length 24 |
| max_forks_repo_path | string | length 4 – 209 |
| max_forks_repo_name | string | length 5 – 121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 – 10 |
| max_forks_count | int64 ⌀ | 1 – 105k |
| max_forks_repo_forks_event_min_datetime | string ⌀ | length 24 |
| max_forks_repo_forks_event_max_datetime | string ⌀ | length 24 |
| content | string | length 4 – 1.02M |
| avg_line_length | float64 | 1.07 – 66.1k |
| max_line_length | int64 | 4 – 266k |
| alphanum_fraction | float64 | 0.01 – 1 |
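
Each row below follows this schema. As a minimal illustration of consuming one parsed row, here is a hedged sketch: the describe_row helper and the dict literal are hypothetical, only the field values are taken from the first row below, and no particular loading mechanism is assumed.

def describe_row(row: dict) -> str:
    """Summarise one row of the schema above (keys as listed in the table)."""
    licenses = ", ".join(row["max_stars_repo_licenses"])
    header = (f"{row['max_stars_repo_path']} "
              f"({row['max_stars_repo_name']} @ {row['max_stars_repo_head_hexsha'][:8]}, "
              f"{licenses}, {row['size']} bytes)")
    return header + "\n" + row["content"]

# Example with the first row's metadata (content shortened here for brevity).
row = {
    "max_stars_repo_path": "src/options/split/backward/probability.py",
    "max_stars_repo_name": "DenDen047/SpatialNetworks",
    "max_stars_repo_head_hexsha": "62a076d12af474b19b406e605d970662d9699cdf",
    "max_stars_repo_licenses": ["MIT"],
    "size": 1099,
    "content": "import torch\n...",
}
print(describe_row(row))
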
Row 1: src/options/split/backward/probability.py (DenDen047/SpatialNetworks)

| field | value |
|---|---|
| hexsha | fbd342c6beac3ee2504504134e1128563e6f6347 |
| size | 1,099 |
| ext / lang | py / Python |
| repo path (stars/issues/forks) | src/options/split/backward/probability.py |
| repo name (stars/issues/forks) | DenDen047/SpatialNetworks |
| repo head hexsha (stars/issues/forks) | 62a076d12af474b19b406e605d970662d9699cdf |
| licenses (stars/issues/forks) | ["MIT"] |
| max_stars_count | 3 (2019-12-15T23:29:11.000Z to 2020-05-08T03:26:20.000Z) |
| max_issues_count | null (issue event datetimes null) |
| max_forks_count | 3 (2019-12-30T15:49:57.000Z to 2020-04-30T08:06:18.000Z) |

content:
import torch
from . import _base
class Masker(_base.Base):
"""For each task, each neuron has probability to be kept.
The more connected neuron to task, the higher the probability to keep it.
Other neurons will be zeroed-out
Parameters
----------
labels: int
How many labels were used for each task
last: Optional[torch.Tensor]
Last mask created by masker
"""
@staticmethod
def _create_mask(label_summed):
maximum, _ = label_summed.max(dim=0)
return torch.bernoulli(label_summed / maximum.unsqueeze(0)).bool()
def first(self, weight):
return Masker._create_mask(
weight.reshape(self.labels, -1, weight.shape[1]).sum(dim=0)
)
def rest(self, weight):
return Masker._create_mask(
torch.stack(
[
weight[self.last[task]].sum(dim=0)
for task in range(self.last.shape[0])
]
)
)
def apply(self, weight, mask, task) -> None:
weight *= mask[task].unsqueeze(0)
Row 1 stats: avg_line_length 25.55814 · max_line_length 77 · alphanum_fraction 0.576888
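
The Masker above keeps each entry of a per-task connectivity matrix with probability equal to its value divided by its column maximum, so the largest entry in each column is always kept. A minimal standalone sketch of that sampling step, assuming PyTorch is installed; the tensor shapes and values are illustrative and not taken from the repository:

import torch

# Illustrative connectivity: 2 tasks x 5 input neurons (non-negative values).
label_summed = torch.tensor([[4.0, 1.0, 0.5, 2.0, 4.0],
                             [1.0, 3.0, 3.0, 1.0, 2.0]])

# As in Masker._create_mask: normalise each column by its maximum, then draw a
# Bernoulli keep/drop decision per (task, neuron) entry.
maximum, _ = label_summed.max(dim=0)
keep_prob = label_summed / maximum.unsqueeze(0)   # values in [0, 1]
mask = torch.bernoulli(keep_prob).bool()

# As in Masker.apply: zero out the dropped neurons of a weight row for task 0.
weight = torch.randn(1, 5)
print(weight * mask[0].unsqueeze(0))
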
Row 2: tests/test_all.py (sebastian-zieba/PACMAN)

| field | value |
|---|---|
| hexsha | f64e39a1d74bad38e2d21a305a9c5ab01ea305eb |
| size | 13,975 |
| ext / lang | py / Python |
| repo path (stars/issues/forks) | tests/test_all.py |
| repo name (stars/issues/forks) | sebastian-zieba/PACMAN |
| repo head hexsha (stars/issues/forks) | 2eb1e4b450c97dc28d5a05b3ebddd80706cfca79 |
| licenses (stars/issues/forks) | ["MIT"] |
| max_stars_count | 1 (2022-03-23T10:26:33.000Z to 2022-03-23T10:26:33.000Z) |
| max_issues_count | null (issue event datetimes null) |
| max_forks_count | 1 (2022-03-29T13:37:31.000Z to 2022-03-29T13:37:31.000Z) |

content:
import numpy as np
import sys, os, time, glob
import pytest
from astropy.io import ascii
from astroquery.mast import Observations
from astropy.io import fits
#sys.path.insert(0, '../src')
sys.path.insert(0, './src')
#sys.path.insert(0, '/home/zieba/Desktop/Projects/Open_source/PACMAN/src/')
#print(sys.path)
from pacman.lib import util
from pacman import s00_table as s00
from pacman import s01_horizons as s01
from pacman import s02_barycorr as s02
from pacman import s03_refspectra as s03
from pacman import s10_direct_images as s10
from pacman import s20_extract as s20
from pacman import s21_bin_spectroscopic_lc as s21
from pacman import s30_run as s30
from pacman.lib import sort_nicely as sn
from pacman.lib.suntimecorr import getcoords as getcoords
from pacman.lib.gaussfitter import gaussfit as gaussfit
from pacman.lib import optextr
from importlib import reload
from astropy.table import Table
from photutils.datasets import (make_noise_image, make_gaussian_sources_image)
test_path = os.path.dirname(os.path.realpath(__file__)) + '/'
eventlabel='GJ1214_13021'
def workdir_finder():
"""
Finds the latest work directory created.
After running the Stage 00 test,
we want to base all following tests (for s01, s02, ...) on the work directory created when running s00.
"""
eventlabel='GJ1214_13021'
# list subdirectories in the run directory
dirs = np.array([f.path for f in os.scandir(test_path) if f.is_dir()])
# saves times when these subdirectories were created.
# They always have the following form: 'run_YYYY-MM-DD_HH-MM-SS_eventlabel'
dirs_bool = np.array(['/run_2' in i for i in dirs])
dirs = dirs[dirs_bool]
eventlabel_len = len(eventlabel)
dirs_times = [i[-(eventlabel_len+20):-(eventlabel_len+1)] for i in dirs]
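# Illustration (not part of the original file): for a directory name ending in
# 'run_2022-03-29_13-37-31_GJ1214_13021' and eventlabel_len == 12, the slice
# [-(12+20):-(12+1)] == [-32:-13] keeps the 19-character timestamp
# '2022-03-29_13-37-31'.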
# sort the times
times_sorted = sn.sort_nicely(dirs_times)
# most recent time
recent_time = times_sorted[-1]
# find the directory with that most recent time
idx = 0
for i in range(len(dirs)):
if dirs[i][-(eventlabel_len+20):-(eventlabel_len+1)] == recent_time:
idx = i
workdir = dirs[idx]
#save the eventlabel which is in the directory name too
print('workdir: ', workdir)
print('eventlabel: ', eventlabel)
return (workdir, eventlabel)
def delete_dir(dir_name):
if os.path.exists(dir_name):
print('Old dir found and deleted')
os.system("rm -r {0}".format(dir_name))
@pytest.mark.run(order=1)
def test_sessionstart(capsys):
"""
Called as the first test. It downloads the three HST files used in this test using astroquery.
"""
file_path = os.path.realpath(__file__)
test_dir = os.path.dirname(file_path)
eventlabel='GJ1214_13021'
dirs = np.array([f.path for f in os.scandir(test_path) if f.is_dir()])
dirs_bool = np.array(['/run_2' in i for i in dirs])
dirs = dirs[dirs_bool]
for diri in dirs:
delete_dir(diri)
# delete old data dir
data_dir = test_dir + '/data'
mast_dir = test_dir + '/mastDownload' # Specify root directory to be searched for .sav files.
delete_dir(data_dir)
delete_dir(mast_dir)
# create a data dir
os.makedirs(data_dir)
#search for the HST data
proposal_obs = Observations.query_criteria(proposal_id=13021, instrument_name='WFC3/IR', project='HST')
data_products = Observations.get_product_list(proposal_obs)
select = ['ibxy07p9q', 'ibxy07paq', 'ibxy07pbq'] #just download these three files
data_products_select = []
for j in select:
data_products_select.append((data_products['obs_id'] == j).data)
data_products_new = data_products[np.any(data_products_select, axis=0)]
data_products_ima = data_products_new[data_products_new['productSubGroupDescription'] == 'IMA']
#download the three files
Observations.download_products(data_products_ima, mrp_only=False, download_dir=test_dir)
filelist = []
for tree,fol,fils in os.walk(mast_dir):
filelist.extend([os.path.join(tree,fil) for fil in fils if fil.endswith('.fits')])
for fil in filelist:
name = fil.split('/')[-1]
os.rename(fil, data_dir + '/' + name)
os.system("rm -r {0}".format(mast_dir))
assert True
@pytest.mark.run(order=2)
def test_s00(capsys):
"""
Reads in the downloaded HST files and creates the work directory and the filelist file.
"""
reload(s00)
pcf_path = test_path + '/run_files'
#run s00
meta = s00.run00(eventlabel, pcf_path)
workdir = meta.workdir + '/'
time.sleep(1)
# run assertions
assert os.path.exists(workdir)
assert os.path.exists(workdir+'/figs')
filelist_file = workdir + '/filelist.txt'
assert os.path.exists(filelist_file)
filelist = ascii.read(filelist_file)
ncols = len(filelist[0])
nrows = len(filelist['t_mjd'])
assert np.round(filelist['t_mjd'][0],4) == 56364.5297
assert (nrows, ncols) == (3, 9)
@pytest.mark.run(order=3)
def test_s01(capsys):
"""
Downloads the HORIZONS file.
"""
reload(s01)
time.sleep(1)
workdir, eventlabel = workdir_finder()
#run s01
meta = s01.run01(eventlabel, workdir)
horizons_file = workdir+'/ancil/horizons/horizons_results_v0.txt'
# run assertions
assert os.path.exists(horizons_file)
def my_round(num):
"""
Cutoff a decimal number after 2 decimal places without rounding.
From: https://stackoverflow.com/questions/967661/python-truncate-after-a-hundreds
"""
return float("%.2f" % (int(num*100)/float(100)))
@pytest.mark.run(order=4)
def test_horizons(capsys):
"""
Check the shape of the HORIZONS file.
"""
workdir, eventlabel = workdir_finder()
horizons_file = workdir+'/ancil/horizons/horizons_results_v0.txt'
start_data = '$$SOE'
end_data = '$$EOE'
# Read in whole table as an list of strings, one string per line
ctable = open(horizons_file, 'r')
wholetable = ctable.readlines()
ctable.close()
# Find start and end line
i = 0
# while end has not been found:
while wholetable[i].find(end_data) == -1:
# if start is found get the index of next line:
if wholetable[i].find(start_data) != -1:
start = i + 1
i += 1
# Chop table
data = wholetable[start:i - 2]
# Extract values:
x, y, z, time = getcoords(data)
#checking shape
assert len(x) == 25
#checking first and last values
assert np.all(np.array([my_round(x[0]), my_round(y[0]), my_round(z[0])]) == np.array([-147684997.27, 16573698.09, 7180590.09]))
assert np.all(np.array([my_round(x[-1]), my_round(y[-1]), my_round(z[-1])])== np.array([-147715099.27, 16386308.81, 7090837.98]))
@pytest.mark.run(order=10)
def test_s02(capsys):
"""
Performs the barycentric correction.
"""
reload(s02)
time.sleep(1)
workdir, eventlabel = workdir_finder()
#run s02
meta = s02.run02(eventlabel, workdir)
filelist_file = workdir + '/filelist.txt'
assert os.path.exists(filelist_file)
filelist = ascii.read(filelist_file)
# Check if the barycentric correction was correctly performed
assert ('t_bjd' in filelist.colnames)
assert np.round(filelist['t_bjd'][0],4) == 2456365.0306
@pytest.mark.run(order=15)
def test_s03(capsys):
"""
Downloads the stellar spectrum and multiplies it with the bandpass.
"""
reload(s03)
time.sleep(1)
workdir, eventlabel = workdir_finder()
#run s03
meta = s03.run03(eventlabel, workdir)
sm_file = workdir + '/ancil/stellar_models/k93models/kp03_3500.fits'
assert os.path.exists(sm_file)
hdul = fits.open(sm_file)
wvl = hdul[1].data['WAVELENGTH']*1e-10
flux = hdul[1].data['g50']*1e-7*1e4/1e-10/np.pi
#check if for the sm fits file the flux and wavelength is >= 0 everywhere
assert np.all(wvl >= 0)
assert np.all(flux >= 0)
#check the refspec_file
refspec_file = workdir + '/ancil/refspec/refspec.txt'
assert os.path.exists(refspec_file)
wvl_refspec, flux_refspec = np.loadtxt(refspec_file).T
# Check if the refspec was correctly created
assert len(wvl_refspec) == 162
#check if for the refspec file the flux and wavelength is >= 0 everywhere
assert np.all(wvl_refspec >= 0)
assert np.all(flux_refspec >= 0)
@pytest.mark.run(order=16)
def test_s10(capsys):
"""
Determines the position of the direct image.
"""
reload(s10)
time.sleep(1)
workdir, eventlabel = workdir_finder()
#run s10
meta = s10.run10(eventlabel, workdir)
xrefyref_file = workdir + '/xrefyref.txt'
assert os.path.exists(xrefyref_file)
if os.path.exists(xrefyref_file):
xrefyref = ascii.read(xrefyref_file)
# Check if the direct image position was determined correctly
assert np.round(xrefyref['pos1'][0], 5) == 513.57510
assert np.round(xrefyref['pos2'][0], 5) == 400.90239
@pytest.mark.run(order=26)
def test_sim_source(capsys):
"""
Determines the position of the simulated direct image.
"""
sigma_psf = 1.0
source = Table()
source_xpos = np.random.randint(10, 54)
source_ypos = np.random.randint(10, 54)
source['flux'] = [5000]
source['x_mean'] = [source_xpos]
source['y_mean'] = [source_ypos]
source['x_stddev'] = sigma_psf * np.ones(1)
source['y_stddev'] = source['x_stddev']
source['theta'] = [0]
source['id'] = [1]
tshape = (64, 64)
source_data = make_gaussian_sources_image(tshape, source)
noise_data = make_noise_image(tshape, distribution='gaussian', mean=80.,
stddev=5., seed=123)
image = (source_data + noise_data)
results = gaussfit(image, noise_data)
assert (source_xpos-1 <= results[2] <= source_xpos+1)
assert (source_ypos-1 <= results[3] <= source_ypos+1)
@pytest.mark.run(order=27)
def test_s20(capsys):
"""
The extraction step. Extracts flux as a function of wavelength and time.
"""
reload(s20)
time.sleep(1)
workdir, eventlabel = workdir_finder()
#run s20
meta = s20.run20(eventlabel, workdir)
extracted_lc_dir_path = workdir + '/extracted_lc'
s20_dir = np.array([f.path for f in os.scandir(extracted_lc_dir_path) if f.is_dir()])[0]
s20_lc_spec_file = s20_dir + '/lc_spec.txt'
s20_lc_white_file = s20_dir + '/lc_white.txt'
#Check if the files were created
assert os.path.exists(s20_lc_spec_file)
assert os.path.exists(s20_lc_white_file)
s20_lc_spec = ascii.read(s20_lc_spec_file)
s20_lc_white = ascii.read(s20_lc_white_file)
#Check the amount of columns
assert len(s20_lc_spec.colnames) == 10
assert len(s20_lc_white.colnames) == 11
#test_optextr
spectrum = np.ones((20,9))
for i in range(len(spectrum)):
for j in range(len(spectrum[0])):
if 4 < i < 8:
if 1 < j < 7:
spectrum[i,j] = 10
err = np.ones((20,9))*0.01
Mnew = np.ones((20,9))
spec_box_0 = 15 * 10
var_box_0 = 1
[f_opt_0, var_opt_0, numoutliers] = optextr.optextr(spectrum, err, spec_box_0, var_box_0, Mnew, meta.nsmooth, meta.sig_cut, meta.save_optextr_plot, 0, 0, meta)
assert np.round(np.sum(f_opt_0), 0) == np.round(np.sum(spectrum), 0) #optimal extraction flux should be the same as the total flux in the array
assert numoutliers == 0 # we didn't introduce any outliers
@pytest.mark.run(order=29)
def test_s21(capsys):
"""
Creates spectroscopic light curves.
"""
reload(s21)
time.sleep(1)
workdir, eventlabel = workdir_finder()
#run s21
meta = s21.run21(eventlabel, workdir)
extracted_sp_dir_path = workdir + '/extracted_sp'
s21_dir = np.array([f.path for f in os.scandir(extracted_sp_dir_path) if f.is_dir()])[0]
s21_wvl_table_file = s21_dir + '/wvl_table.dat'
assert os.path.exists(s21_wvl_table_file)
s21_wvl_table = ascii.read(s21_wvl_table_file)
wvl_s21 = s21_wvl_table['wavelengths']
#Check if the number of bins defined in the pcf is the same as
#the number of wavelength bins saved into the wvl_table.dat file.
assert meta.wvl_bins == len(wvl_s21)
#Number of light curves should be the same as meta.wvl_bins
extracted_sp_lcs_files = glob.glob(os.path.join(s21_dir, "*.txt"))
assert meta.wvl_bins == len(extracted_sp_lcs_files)
#There should be 10 columns as for the /lc_spec.txt file which was generated after running s20.
extracted_sp_lc_file_0 = sn.sort_nicely(extracted_sp_lcs_files)[0]
extracted_sp_lc_0 = ascii.read(extracted_sp_lc_file_0)
assert len(extracted_sp_lc_0.colnames) == 10
@pytest.mark.run(order=30)
def test_s30(capsys):
"""
Fits spectroscopic light curves.
"""
reload(s30)
time.sleep(1)
workdir, eventlabel = workdir_finder()
#run s30
meta = s30.run30(eventlabel, workdir)
dirs = np.array([f.path for f in os.scandir(workdir) if f.is_dir()])
dirs_bool = np.array(['fit_' in i for i in dirs])
fit_dirs = dirs[dirs_bool]
fit_dir = fit_dirs[0]
assert os.path.exists(fit_dir)
meta.s30_fit_white = True
meta.s30_most_recent_s20 = True
s30.run30(eventlabel, workdir, meta=meta)
dirs = np.array([f.path for f in os.scandir(workdir) if f.is_dir()])
dirs_bool = np.array(['fit_' in i for i in dirs])
print('dirs_bool: ', dirs_bool)
assert True
@pytest.mark.run(order=40)
def test_sessionfinish(capsys):
"""
Called after whole test run finished. It will delete the created work directory and the downloaded HST files.
"""
workdir, eventlabel = workdir_finder()
file_path = os.path.realpath(__file__)
test_dir = os.path.dirname(file_path)
data_dir = test_dir + '/data'
os.system("rm -r {0}".format(data_dir))
os.system("rm -r {0}".format(workdir))
print('deleted directories and files again')
assert True
Row 2 stats: avg_line_length 28.232323 · max_line_length 163 · alphanum_fraction 0.674419
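
The my_round helper in the test file above truncates a value after two decimal places rather than rounding it, so the hard-coded HORIZONS coordinate assertions compare against predictably truncated numbers. A small standalone sketch of that behaviour (the example values are illustrative; the helper body mirrors the one in the file):

def my_round(num):
    # Truncate after 2 decimal places without rounding (see tests/test_all.py).
    return float("%.2f" % (int(num * 100) / float(100)))

assert my_round(16573698.099) == 16573698.09   # truncated, not rounded up to .10
assert my_round(-3.019) == -3.01               # int() truncates toward zero
print(my_round(7180590.098))                   # prints 7180590.09
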
Row 3: scripts/api_dump_generator.py (g-keen/VulkanTools)

| field | value |
|---|---|
| hexsha | 518acc300a4a019b9d4ed01215543bf47f4ae1bf |
| size | 71,021 |
| ext / lang | py / Python |
| repo path (stars/issues/forks) | scripts/api_dump_generator.py |
| repo name (stars/issues/forks) | g-keen/VulkanTools |
| repo head hexsha (stars/issues/forks) | 231ea2844079c92ea74e2750e8cb3fd1c0434152 |
| licenses (stars/issues/forks) | ["Apache-2.0", "BSD-3-Clause"] |
| max_stars_count | null (star event datetimes null) |
| max_issues_count | null (issue event datetimes null) |
| max_forks_count | null (fork event datetimes null) |

content:
#!/usr/bin/python3 -i
#
# Copyright (c) 2015-2016 Valve Corporation
# Copyright (c) 2015-2016 LunarG, Inc.
# Copyright (c) 2015-2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Lenny Komow <lenny@lunarg.com>
#
# The API dump layer works by passing custom format strings to the ApiDumpGenerator. These format
# strings are C++ code, with 3-ish exceptions:
# * Anything beginning with @ will be expanded by the ApiDumpGenerator. These are used to allow
# iteration over various items within the Vulkan spec, such as functions, enums, etc.
# * Anything surrounded by { and } will be substituted when the ApiDumpGenerator expands the @
# directives. This gives a way to get things like data types or names for anything that can
# be iterated over in an @ directive.
# * Curly braces must be doubled like {{ for a single curly brace to appear in the output code.
#
# The API dump uses separate format strings for each output file, but passes them to a common
# generator. This allows greater flexibility, as changing the output codegen means just changing
# the corresponding format string.
#
# Currently, the API dump layer generates the following files from the following strings:
# * api_dump.cpp: COMMON_CODEGEN - Provides all entrypoints for functions and dispatches the calls
# to the proper back end
# * api_dump_text.h: TEXT_CODEGEN - Provides the back end for dumping to a text file
#
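# As a hedged illustration only (this sample format string is hypothetical and not
# one of the codegen strings below): a fragment such as
#
#     @foreach function where('{funcReturn}' != 'void')
#     {funcReturn} {funcName}({funcTypedParams});
#     @end function
#
# is expanded once per matching Vulkan function, with the {curly-brace} fields
# substituted per function, e.g. roughly
#
#     VkResult vkCreateInstance(const VkInstanceCreateInfo* pCreateInfo,
#                               const VkAllocationCallbacks* pAllocator,
#                               VkInstance* pInstance);
#
# while doubled braces {{ }} collapse to single braces in the emitted C++.
#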
import generator as gen
import re
import sys
import xml.etree.ElementTree
COMMON_CODEGEN = """
/* Copyright (c) 2015-2016 Valve Corporation
* Copyright (c) 2015-2016 LunarG, Inc.
* Copyright (c) 2015-2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Author: Lenny Komow <lenny@lunarg.com>
*/
/*
* This file is generated from the Khronos Vulkan XML API Registry.
*/
#include "api_dump_text.h"
#include "api_dump_html.h"
//============================= Dump Functions ==============================//
@foreach function where('{funcReturn}' != 'void')
inline void dump_{funcName}(ApiDumpInstance& dump_inst, {funcReturn} result, {funcTypedParams})
{{
loader_platform_thread_lock_mutex(dump_inst.outputMutex());
switch(dump_inst.settings().format())
{{
case ApiDumpFormat::Text:
dump_text_{funcName}(dump_inst, result, {funcNamedParams});
break;
case ApiDumpFormat::Html:
dump_html_{funcName}(dump_inst, result, {funcNamedParams});
break;
}}
loader_platform_thread_unlock_mutex(dump_inst.outputMutex());
}}
@end function
@foreach function where('{funcReturn}' == 'void')
inline void dump_{funcName}(ApiDumpInstance& dump_inst, {funcTypedParams})
{{
loader_platform_thread_lock_mutex(dump_inst.outputMutex());
switch(dump_inst.settings().format())
{{
case ApiDumpFormat::Text:
dump_text_{funcName}(dump_inst, {funcNamedParams});
break;
case ApiDumpFormat::Html:
dump_html_{funcName}(dump_inst, {funcNamedParams});
break;
}}
loader_platform_thread_unlock_mutex(dump_inst.outputMutex());
}}
@end function
//============================= API EntryPoints =============================//
// Specifically implemented functions
@foreach function where('{funcName}' == 'vkCreateInstance')
VK_LAYER_EXPORT VKAPI_ATTR {funcReturn} VKAPI_CALL {funcName}({funcTypedParams})
{{
// Get the function pointer
VkLayerInstanceCreateInfo* chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo != 0);
PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
assert(fpGetInstanceProcAddr != 0);
PFN_vkCreateInstance fpCreateInstance = (PFN_vkCreateInstance) fpGetInstanceProcAddr(NULL, "vkCreateInstance");
if(fpCreateInstance == NULL) {{
return VK_ERROR_INITIALIZATION_FAILED;
}}
// Call the function and create the dispatch table
chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
{funcReturn} result = fpCreateInstance({funcNamedParams});
if(result == VK_SUCCESS) {{
initInstanceTable(*pInstance, fpGetInstanceProcAddr);
}}
{funcStateTrackingCode}
// Output the API dump
dump_{funcName}(ApiDumpInstance::current(), result, {funcNamedParams});
return result;
}}
@end function
@foreach function where('{funcName}' == 'vkDestroyInstance')
VK_LAYER_EXPORT VKAPI_ATTR {funcReturn} VKAPI_CALL {funcName}({funcTypedParams})
{{
// Destroy the dispatch table
dispatch_key key = get_dispatch_key({funcDispatchParam});
instance_dispatch_table({funcDispatchParam})->DestroyInstance({funcNamedParams});
destroy_instance_dispatch_table(key);
{funcStateTrackingCode}
// Output the API dump
dump_{funcName}(ApiDumpInstance::current(), {funcNamedParams});
}}
@end function
@foreach function where('{funcName}' == 'vkCreateDevice')
VK_LAYER_EXPORT VKAPI_ATTR {funcReturn} VKAPI_CALL {funcName}({funcTypedParams})
{{
// Get the function pointer
VkLayerDeviceCreateInfo* chain_info = get_chain_info(pCreateInfo, VK_LAYER_LINK_INFO);
assert(chain_info->u.pLayerInfo != 0);
PFN_vkGetInstanceProcAddr fpGetInstanceProcAddr = chain_info->u.pLayerInfo->pfnNextGetInstanceProcAddr;
PFN_vkGetDeviceProcAddr fpGetDeviceProcAddr = chain_info->u.pLayerInfo->pfnNextGetDeviceProcAddr;
PFN_vkCreateDevice fpCreateDevice = (PFN_vkCreateDevice) fpGetInstanceProcAddr(NULL, "vkCreateDevice");
if(fpCreateDevice == NULL) {{
return VK_ERROR_INITIALIZATION_FAILED;
}}
// Call the function and create the dispatch table
chain_info->u.pLayerInfo = chain_info->u.pLayerInfo->pNext;
{funcReturn} result = fpCreateDevice({funcNamedParams});
if(result == VK_SUCCESS) {{
initDeviceTable(*pDevice, fpGetDeviceProcAddr);
}}
{funcStateTrackingCode}
// Output the API dump
dump_{funcName}(ApiDumpInstance::current(), result, {funcNamedParams});
return result;
}}
@end function
@foreach function where('{funcName}' == 'vkDestroyDevice')
VK_LAYER_EXPORT VKAPI_ATTR {funcReturn} VKAPI_CALL {funcName}({funcTypedParams})
{{
// Destroy the dispatch table
dispatch_key key = get_dispatch_key({funcDispatchParam});
device_dispatch_table({funcDispatchParam})->DestroyDevice({funcNamedParams});
destroy_device_dispatch_table(key);
{funcStateTrackingCode}
// Output the API dump
dump_{funcName}(ApiDumpInstance::current(), {funcNamedParams});
}}
@end function
@foreach function where('{funcName}' == 'vkEnumerateInstanceExtensionProperties')
VK_LAYER_EXPORT VKAPI_ATTR {funcReturn} VKAPI_CALL {funcName}({funcTypedParams})
{{
return util_GetExtensionProperties(0, NULL, pPropertyCount, pProperties);
}}
@end function
@foreach function where('{funcName}' == 'vkEnumerateInstanceLayerProperties')
VK_LAYER_EXPORT VKAPI_ATTR {funcReturn} VKAPI_CALL {funcName}({funcTypedParams})
{{
static const VkLayerProperties layerProperties[] = {{
{{
"VK_LAYER_LUNARG_api_dump",
VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION), // specVersion
VK_MAKE_VERSION(0, 2, 0), // implementationVersion
"layer: api_dump",
}}
}};
return util_GetLayerProperties(ARRAY_SIZE(layerProperties), layerProperties, pPropertyCount, pProperties);
}}
@end function
@foreach function where('{funcName}' == 'vkEnumerateDeviceLayerProperties')
VK_LAYER_EXPORT VKAPI_ATTR {funcReturn} VKAPI_CALL {funcName}({funcTypedParams})
{{
static const VkLayerProperties layerProperties[] = {{
{{
"VK_LAYER_LUNARG_api_dump",
VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION),
VK_MAKE_VERSION(0, 2, 0),
"layer: api_dump",
}}
}};
return util_GetLayerProperties(ARRAY_SIZE(layerProperties), layerProperties, pPropertyCount, pProperties);
}}
@end function
@foreach function where('{funcName}' == 'vkQueuePresentKHR')
VK_LAYER_EXPORT VKAPI_ATTR {funcReturn} VKAPI_CALL {funcName}({funcTypedParams})
{{
{funcReturn} result = device_dispatch_table({funcDispatchParam})->{funcShortName}({funcNamedParams});
{funcStateTrackingCode}
dump_{funcName}(ApiDumpInstance::current(), result, {funcNamedParams});
ApiDumpInstance::current().nextFrame();
return result;
}}
@end function
// Autogen instance functions
@foreach function where('{funcType}' == 'instance' and '{funcReturn}' != 'void' and '{funcName}' not in ['vkCreateInstance', 'vkDestroyInstance', 'vkCreateDevice', 'vkGetInstanceProcAddr', 'vkEnumerateDeviceExtensionProperties', 'vkEnumerateDeviceLayerProperties'])
VK_LAYER_EXPORT VKAPI_ATTR {funcReturn} VKAPI_CALL {funcName}({funcTypedParams})
{{
{funcReturn} result = instance_dispatch_table({funcDispatchParam})->{funcShortName}({funcNamedParams});
{funcStateTrackingCode}
dump_{funcName}(ApiDumpInstance::current(), result, {funcNamedParams});
return result;
}}
@end function
@foreach function where('{funcType}' == 'instance' and '{funcReturn}' == 'void' and '{funcName}' not in ['vkCreateInstance', 'vkDestroyInstance', 'vkCreateDevice', 'vkGetInstanceProcAddr', 'vkEnumerateDeviceExtensionProperties', 'vkEnumerateDeviceLayerProperties'])
VK_LAYER_EXPORT VKAPI_ATTR {funcReturn} VKAPI_CALL {funcName}({funcTypedParams})
{{
instance_dispatch_table({funcDispatchParam})->{funcShortName}({funcNamedParams});
{funcStateTrackingCode}
dump_{funcName}(ApiDumpInstance::current(), {funcNamedParams});
}}
@end function
// Autogen device functions
@foreach function where('{funcType}' == 'device' and '{funcReturn}' != 'void' and '{funcName}' not in ['vkDestroyDevice', 'vkEnumerateInstanceExtensionProperties', 'vkEnumerateInstanceLayerProperties', 'vkQueuePresentKHR', 'vkGetDeviceProcAddr'])
VK_LAYER_EXPORT VKAPI_ATTR {funcReturn} VKAPI_CALL {funcName}({funcTypedParams})
{{
{funcReturn} result = device_dispatch_table({funcDispatchParam})->{funcShortName}({funcNamedParams});
{funcStateTrackingCode}
dump_{funcName}(ApiDumpInstance::current(), result, {funcNamedParams});
return result;
}}
@end function
@foreach function where('{funcType}' == 'device' and '{funcReturn}' == 'void' and '{funcName}' not in ['vkDestroyDevice', 'vkEnumerateInstanceExtensionProperties', 'vkEnumerateInstanceLayerProperties', 'vkGetDeviceProcAddr'])
VK_LAYER_EXPORT VKAPI_ATTR {funcReturn} VKAPI_CALL {funcName}({funcTypedParams})
{{
device_dispatch_table({funcDispatchParam})->{funcShortName}({funcNamedParams});
{funcStateTrackingCode}
dump_{funcName}(ApiDumpInstance::current(), {funcNamedParams});
}}
@end function
VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char* pName)
{{
@foreach function where('{funcType}' == 'instance' and '{funcName}' not in [ 'vkEnumerateDeviceExtensionProperties' ])
if(strcmp(pName, "{funcName}") == 0)
return reinterpret_cast<PFN_vkVoidFunction>({funcName});
@end function
if(instance_dispatch_table(instance)->GetInstanceProcAddr == NULL)
return NULL;
return instance_dispatch_table(instance)->GetInstanceProcAddr(instance, pName);
}}
VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char* pName)
{{
@foreach function where('{funcType}' == 'device')
if(strcmp(pName, "{funcName}") == 0)
return reinterpret_cast<PFN_vkVoidFunction>({funcName});
@end function
if(device_dispatch_table(device)->GetDeviceProcAddr == NULL)
return NULL;
return device_dispatch_table(device)->GetDeviceProcAddr(device, pName);
}}
"""
TEXT_CODEGEN = """
/* Copyright (c) 2015-2016 Valve Corporation
* Copyright (c) 2015-2016 LunarG, Inc.
* Copyright (c) 2015-2016 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Author: Lenny Komow <lenny@lunarg.com>
*/
/*
* This file is generated from the Khronos Vulkan XML API Registry.
*/
#pragma once
#include "api_dump.h"
@foreach struct
std::ostream& dump_text_{sctName}(const {sctName}& object, const ApiDumpSettings& settings, int indents{sctConditionVars});
@end struct
@foreach union
std::ostream& dump_text_{unName}(const {unName}& object, const ApiDumpSettings& settings, int indents);
@end union
//=========================== Type Implementations ==========================//
@foreach type where('{etyName}' != 'void')
inline std::ostream& dump_text_{etyName}({etyName} object, const ApiDumpSettings& settings, int indents)
{{
@if('{etyName}' != 'uint8_t')
return settings.stream() << object;
@end if
@if('{etyName}' == 'uint8_t')
return settings.stream() << (uint32_t) object;
@end if
}}
@end type
//========================= Basetype Implementations ========================//
@foreach basetype
inline std::ostream& dump_text_{baseName}({baseName} object, const ApiDumpSettings& settings, int indents)
{{
return settings.stream() << object;
}}
@end basetype
//======================= System Type Implementations =======================//
@foreach systype
inline std::ostream& dump_text_{sysName}(const {sysType} object, const ApiDumpSettings& settings, int indents)
{{
return settings.stream() << object;
}}
@end systype
//========================== Handle Implementations =========================//
@foreach handle
inline std::ostream& dump_text_{hdlName}(const {hdlName} object, const ApiDumpSettings& settings, int indents)
{{
if(settings.showAddress())
return settings.stream() << object;
else
return settings.stream() << "address";
}}
@end handle
//=========================== Enum Implementations ==========================//
@foreach enum
std::ostream& dump_text_{enumName}({enumName} object, const ApiDumpSettings& settings, int indents)
{{
switch((int64_t) object)
{{
@foreach option
case {optValue}:
settings.stream() << "{optName} (";
break;
@end option
default:
settings.stream() << "UNKNOWN (";
}}
return settings.stream() << object << ")";
}}
@end enum
//========================= Bitmask Implementations =========================//
@foreach bitmask
std::ostream& dump_text_{bitName}({bitName} object, const ApiDumpSettings& settings, int indents)
{{
bool is_first = true;
//settings.formatNameType(stream, indents, name, type_string) << object;
settings.stream() << object;
@foreach option
if(object & {optValue})
is_first = dump_text_bitmaskOption("{optName}", settings.stream(), is_first);
@end option
if(!is_first)
settings.stream() << ")";
return settings.stream();
}}
@end bitmask
//=========================== Flag Implementations ==========================//
@foreach flag where('{flagEnum}' != 'None')
inline std::ostream& dump_text_{flagName}({flagName} object, const ApiDumpSettings& settings, int indents)
{{
return dump_text_{flagEnum}(({flagEnum}) object, settings, indents);
}}
@end flag
@foreach flag where('{flagEnum}' == 'None')
inline std::ostream& dump_text_{flagName}({flagName} object, const ApiDumpSettings& settings, int indents)
{{
return settings.stream() << object;
}}
@end flag
//======================= Func Pointer Implementations ======================//
@foreach funcpointer
inline std::ostream& dump_text_{pfnName}({pfnName} object, const ApiDumpSettings& settings, int indents)
{{
if(settings.showAddress())
return settings.stream() << object;
else
return settings.stream() << "address";
}}
@end funcpointer
//========================== Struct Implementations =========================//
@foreach struct where('{sctName}' != 'VkShaderModuleCreateInfo')
std::ostream& dump_text_{sctName}(const {sctName}& object, const ApiDumpSettings& settings, int indents{sctConditionVars})
{{
if(settings.showAddress())
settings.stream() << &object << ":\\n";
else
settings.stream() << "address:\\n";
@foreach member
@if('{memCondition}' != 'None')
if({memCondition})
@end if
@if({memPtrLevel} == 0)
dump_text_value<const {memBaseType}>(object.{memName}, settings, "{memType}", "{memName}", indents + 1, dump_text_{memTypeID}{memInheritedConditions});
@end if
@if({memPtrLevel} == 1 and '{memLength}' == 'None')
dump_text_pointer<const {memBaseType}>(object.{memName}, settings, "{memType}", "{memName}", indents + 1, dump_text_{memTypeID}{memInheritedConditions});
@end if
@if({memPtrLevel} == 1 and '{memLength}' != 'None' and not {memLengthIsMember})
dump_text_array<const {memBaseType}>(object.{memName}, {memLength}, settings, "{memType}", "{memChildType}", "{memName}", indents + 1, dump_text_{memTypeID}{memInheritedConditions});
@end if
@if({memPtrLevel} == 1 and '{memLength}' != 'None' and {memLengthIsMember})
dump_text_array<const {memBaseType}>(object.{memName}, object.{memLength}, settings, "{memType}", "{memChildType}", "{memName}", indents + 1, dump_text_{memTypeID}{memInheritedConditions});
@end if
@if('{memCondition}' != 'None')
else
dump_text_special("UNUSED", settings, "{memType}", "{memName}", indents + 1);
@end if
@end member
return settings.stream();
}}
@end struct
@foreach struct where('{sctName}' == 'VkShaderModuleCreateInfo')
std::ostream& dump_text_{sctName}(const {sctName}& object, const ApiDumpSettings& settings, int indents{sctConditionVars})
{{
if(settings.showAddress())
settings.stream() << &object << ":\\n";
else
settings.stream() << "address:\\n";
@foreach member
@if('{memCondition}' != 'None')
if({memCondition})
@end if
@if({memPtrLevel} == 0)
dump_text_value<const {memBaseType}>(object.{memName}, settings, "{memType}", "{memName}", indents + 1, dump_text_{memTypeID}{memInheritedConditions});
@end if
@if({memPtrLevel} == 1 and '{memLength}' == 'None')
dump_text_pointer<const {memBaseType}>(object.{memName}, settings, "{memType}", "{memName}", indents + 1, dump_text_{memTypeID}{memInheritedConditions});
@end if
@if({memPtrLevel} == 1 and '{memLength}' != 'None' and not {memLengthIsMember} and '{memName}' != 'pCode')
dump_text_array<const {memBaseType}>(object.{memName}, {memLength}, settings, "{memType}", "{memChildType}", "{memName}", indents + 1, dump_text_{memTypeID}{memInheritedConditions});
@end if
@if({memPtrLevel} == 1 and '{memLength}' != 'None' and {memLengthIsMember} and '{memName}' != 'pCode')
dump_text_array<const {memBaseType}>(object.{memName}, object.{memLength}, settings, "{memType}", "{memChildType}", "{memName}", indents + 1, dump_text_{memTypeID}{memInheritedConditions});
@end if
@if('{memName}' == 'pCode')
if(settings.showShader())
dump_text_array<const {memBaseType}>(object.{memName}, object.{memLength}, settings, "{memType}", "{memChildType}", "{memName}", indents + 1, dump_text_{memTypeID}{memInheritedConditions});
else
dump_text_special("SHADER DATA", settings, "{memType}", "{memName}", indents + 1);
@end if
@if('{memCondition}' != 'None')
else
dump_text_special("UNUSED", settings, "{memType}", "{memName}", indents + 1);
@end if
@end member
return settings.stream();
}}
@end struct
//========================== Union Implementations ==========================//
@foreach union
std::ostream& dump_text_{unName}(const {unName}& object, const ApiDumpSettings& settings, int indents)
{{
if(settings.showAddress())
settings.stream() << &object << " (Union):\\n";
else
settings.stream() << "address (Union):\\n";
@foreach choice
@if({chcPtrLevel} == 0)
dump_text_value<const {chcBaseType}>(object.{chcName}, settings, "{chcType}", "{chcName}", indents + 1, dump_text_{chcTypeID});
@end if
@if({chcPtrLevel} == 1 and '{chcLength}' == 'None')
dump_text_pointer<const {chcBaseType}>(object.{chcName}, settings, "{chcType}", "{chcName}", indents + 1, dump_text_{chcTypeID});
@end if
@if({chcPtrLevel} == 1 and '{chcLength}' != 'None')
dump_text_array<const {chcBaseType}>(object.{chcName}, {chcLength}, settings, "{chcType}", "{chcChildType}", "{chcName}", indents + 1, dump_text_{chcTypeID});
@end if
@end choice
return settings.stream();
}}
@end union
//========================= Function Implementations ========================//
@foreach function where('{funcReturn}' != 'void')
std::ostream& dump_text_{funcName}(ApiDumpInstance& dump_inst, {funcReturn} result, {funcTypedParams})
{{
const ApiDumpSettings& settings(dump_inst.settings());
settings.stream() << "Thread " << dump_inst.threadID() << ", Frame " << dump_inst.frameCount() << ":\\n";
settings.stream() << "{funcName}({funcNamedParams}) returns {funcReturn} ";
dump_text_{funcReturn}(result, settings, 0) << ":\\n";
if(settings.showParams())
{{
@foreach parameter
@if({prmPtrLevel} == 0)
dump_text_value<const {prmBaseType}>({prmName}, settings, "{prmType}", "{prmName}", 1, dump_text_{prmTypeID}{prmInheritedConditions});
@end if
@if({prmPtrLevel} == 1 and '{prmLength}' == 'None')
dump_text_pointer<const {prmBaseType}>({prmName}, settings, "{prmType}", "{prmName}", 1, dump_text_{prmTypeID}{prmInheritedConditions});
@end if
@if({prmPtrLevel} == 1 and '{prmLength}' != 'None')
dump_text_array<const {prmBaseType}>({prmName}, {prmLength}, settings, "{prmType}", "{prmChildType}", "{prmName}", 1, dump_text_{prmTypeID}{prmInheritedConditions});
@end if
@end parameter
}}
settings.shouldFlush() ? settings.stream() << std::endl : settings.stream() << "\\n";
return settings.stream();
}}
@end function
@foreach function where('{funcReturn}' == 'void')
std::ostream& dump_text_{funcName}(ApiDumpInstance& dump_inst, {funcTypedParams})
{{
const ApiDumpSettings& settings(dump_inst.settings());
settings.stream() << "Thread " << dump_inst.threadID() << ", Frame " << dump_inst.frameCount() << ":\\n";
settings.stream() << "{funcName}({funcNamedParams}) returns {funcReturn}:\\n";
if(settings.showParams())
{{
@foreach parameter
@if({prmPtrLevel} == 0)
dump_text_value<const {prmBaseType}>({prmName}, settings, "{prmType}", "{prmName}", 1, dump_text_{prmTypeID}{prmInheritedConditions});
@end if
@if({prmPtrLevel} == 1 and '{prmLength}' == 'None')
dump_text_pointer<const {prmBaseType}>({prmName}, settings, "{prmType}", "{prmName}", 1, dump_text_{prmTypeID}{prmInheritedConditions});
@end if
@if({prmPtrLevel} == 1 and '{prmLength}' != 'None')
dump_text_array<const {prmBaseType}>({prmName}, {prmLength}, settings, "{prmType}", "{prmChildType}", "{prmName}", 1, dump_text_{prmTypeID}{prmInheritedConditions});
@end if
@end parameter
}}
settings.shouldFlush() ? settings.stream() << std::endl : settings.stream() << "\\n";
return settings.stream();
}}
@end function
"""
# This HTML Codegen is essentially copied from the format above.
# Due to the way some of the functions have been organized, some of the HTML tags
# that are opened are closed in another function. See api_dump.h. This may need refactoring.
HTML_CODEGEN = """
/* Copyright (c) 2015-2017 Valve Corporation
* Copyright (c) 2015-2017 LunarG, Inc.
* Copyright (c) 2015-2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Author: Lenny Komow <lenny@lunarg.com>
* Author: Joey Bzdek <joey@lunarg.com>
*/
/*
* This file is generated from the Khronos Vulkan XML API Registry.
*/
#pragma once
#include "api_dump.h"
@foreach struct
std::ostream& dump_html_{sctName}(const {sctName}& object, const ApiDumpSettings& settings, int indents{sctConditionVars});
@end struct
@foreach union
std::ostream& dump_html_{unName}(const {unName}& object, const ApiDumpSettings& settings, int indents);
@end union
//=========================== Type Implementations ==========================//
@foreach type where('{etyName}' != 'void')
inline std::ostream& dump_html_{etyName}({etyName} object, const ApiDumpSettings& settings, int indents)
{{
settings.stream() << "<div class='val'>";
@if('{etyName}' != 'uint8_t')
settings.stream() << object;
@end if
@if('{etyName}' == 'uint8_t')
settings.stream() << (uint32_t) object;
@end if
return settings.stream() << "</div></summary>";
}}
@end type
//========================= Basetype Implementations ========================//
@foreach basetype
inline std::ostream& dump_html_{baseName}({baseName} object, const ApiDumpSettings& settings, int indents)
{{
return settings.stream() << "<div class='val'>" << object << "</div></summary>";
}}
@end basetype
//======================= System Type Implementations =======================//
@foreach systype
inline std::ostream& dump_html_{sysName}(const {sysType} object, const ApiDumpSettings& settings, int indents)
{{
return settings.stream() << "<div class='val'>" << object << "</div></summary>";
}}
@end systype
//========================== Handle Implementations =========================//
@foreach handle
inline std::ostream& dump_html_{hdlName}(const {hdlName} object, const ApiDumpSettings& settings, int indents)
{{
settings.stream() << "<div class='val'>";
if(settings.showAddress())
settings.stream() << object;
else
settings.stream() << "address";
return settings.stream() << "</div></summary>";
}}
@end handle
//=========================== Enum Implementations ==========================//
@foreach enum
std::ostream& dump_html_{enumName}({enumName} object, const ApiDumpSettings& settings, int indents)
{{
settings.stream() << "<div class='val'>";
switch((int64_t) object)
{{
@foreach option
case {optValue}:
settings.stream() << "{optName} (";
break;
@end option
default:
settings.stream() << "UNKNOWN (";
}}
return settings.stream() << object << ")</div></summary>";
}}
@end enum
//========================= Bitmask Implementations =========================//
@foreach bitmask
std::ostream& dump_html_{bitName}({bitName} object, const ApiDumpSettings& settings, int indents)
{{
settings.stream() << "<div class=\'val\'>";
bool is_first = true;
settings.stream() << object;
@foreach option
if(object & {optValue})
is_first = dump_html_bitmaskOption("{optName}", settings.stream(), is_first);
@end option
if(!is_first)
settings.stream() << ")";
return settings.stream() << "</div></summary>";
}}
@end bitmask
//=========================== Flag Implementations ==========================//
@foreach flag where('{flagEnum}' != 'None')
inline std::ostream& dump_html_{flagName}({flagName} object, const ApiDumpSettings& settings, int indents)
{{
return dump_html_{flagEnum}(({flagEnum}) object, settings, indents);
}}
@end flag
@foreach flag where('{flagEnum}' == 'None')
inline std::ostream& dump_html_{flagName}({flagName} object, const ApiDumpSettings& settings, int indents)
{{
return settings.stream() << "<div class=\'val\'>"
<< object << "</div></summary>";
}}
@end flag
//======================= Func Pointer Implementations ======================//
@foreach funcpointer
inline std::ostream& dump_html_{pfnName}({pfnName} object, const ApiDumpSettings& settings, int indents)
{{
settings.stream() << "<div class=\'val\'>";
if(settings.showAddress())
settings.stream() << object;
else
settings.stream() << "address";
return settings.stream() << "</div></summary>";
}}
@end funcpointer
//========================== Struct Implementations =========================//
@foreach struct where('{sctName}' != 'VkShaderModuleCreateInfo')
std::ostream& dump_html_{sctName}(const {sctName}& object, const ApiDumpSettings& settings, int indents{sctConditionVars})
{{
settings.stream() << "<div class=\'val\'>";
if(settings.showAddress())
settings.stream() << &object << "\\n";
else
settings.stream() << "address\\n";
settings.stream() << "</div></summary>";
@foreach member
@if('{memCondition}' != 'None')
if({memCondition})
@end if
@if({memPtrLevel} == 0)
dump_html_value<const {memBaseType}>(object.{memName}, settings, "{memType}", "{memName}", indents + 1, dump_html_{memTypeID}{memInheritedConditions});
@end if
@if({memPtrLevel} == 1 and '{memLength}' == 'None')
dump_html_pointer<const {memBaseType}>(object.{memName}, settings, "{memType}", "{memName}", indents + 1, dump_html_{memTypeID}{memInheritedConditions});
@end if
@if({memPtrLevel} == 1 and '{memLength}' != 'None' and not {memLengthIsMember})
dump_html_array<const {memBaseType}>(object.{memName}, {memLength}, settings, "{memType}", "{memChildType}", "{memName}", indents + 1, dump_html_{memTypeID}{memInheritedConditions});
@end if
@if({memPtrLevel} == 1 and '{memLength}' != 'None' and {memLengthIsMember})
dump_html_array<const {memBaseType}>(object.{memName}, object.{memLength}, settings, "{memType}", "{memChildType}", "{memName}", indents + 1, dump_html_{memTypeID}{memInheritedConditions});
@end if
@if('{memCondition}' != 'None')
else
dump_html_special("UNUSED", settings, "{memType}", "{memName}", indents + 1);
@end if
@end member
return settings.stream();
}}
@end struct
@foreach struct where('{sctName}' == 'VkShaderModuleCreateInfo')
std::ostream& dump_html_{sctName}(const {sctName}& object, const ApiDumpSettings& settings, int indents{sctConditionVars})
{{
settings.stream() << "<div class='val'>";
if(settings.showAddress())
settings.stream() << &object << "\\n";
else
settings.stream() << "address\\n";
settings.stream() << "</div></summary>";
@foreach member
@if('{memCondition}' != 'None')
if({memCondition})
@end if
@if({memPtrLevel} == 0)
dump_html_value<const {memBaseType}>(object.{memName}, settings, "{memType}", "{memName}", indents + 1, dump_html_{memTypeID}{memInheritedConditions});
@end if
@if({memPtrLevel} == 1 and '{memLength}' == 'None')
dump_html_pointer<const {memBaseType}>(object.{memName}, settings, "{memType}", "{memName}", indents + 1, dump_html_{memTypeID}{memInheritedConditions});
@end if
@if({memPtrLevel} == 1 and '{memLength}' != 'None' and not {memLengthIsMember} and '{memName}' != 'pCode')
dump_html_array<const {memBaseType}>(object.{memName}, {memLength}, settings, "{memType}", "{memChildType}", "{memName}", indents + 1, dump_html_{memTypeID}{memInheritedConditions});
@end if
@if({memPtrLevel} == 1 and '{memLength}' != 'None' and {memLengthIsMember} and '{memName}' != 'pCode')
dump_html_array<const {memBaseType}>(object.{memName}, object.{memLength}, settings, "{memType}", "{memChildType}", "{memName}", indents + 1, dump_html_{memTypeID}{memInheritedConditions});
@end if
@if('{memName}' == 'pCode')
if(settings.showShader())
dump_html_array<const {memBaseType}>(object.{memName}, object.{memLength}, settings, "{memType}", "{memChildType}", "{memName}", indents + 1, dump_html_{memTypeID}{memInheritedConditions});
else
dump_html_special("SHADER DATA", settings, "{memType}", "{memName}", indents + 1);
@end if
@if('{memCondition}' != 'None')
else
dump_html_special("UNUSED", settings, "{memType}", "{memName}", indents + 1);
@end if
@end member
return settings.stream();
}}
@end struct
//========================== Union Implementations ==========================//
@foreach union
std::ostream& dump_html_{unName}(const {unName}& object, const ApiDumpSettings& settings, int indents)
{{
settings.stream() << "<div class='val'>";
if(settings.showAddress())
settings.stream() << &object << " (Union):\\n";
else
settings.stream() << "address (Union):\\n";
settings.stream() << "</div></summary>";
@foreach choice
@if({chcPtrLevel} == 0)
dump_html_value<const {chcBaseType}>(object.{chcName}, settings, "{chcType}", "{chcName}", indents + 1, dump_html_{chcTypeID});
@end if
@if({chcPtrLevel} == 1 and '{chcLength}' == 'None')
dump_html_pointer<const {chcBaseType}>(object.{chcName}, settings, "{chcType}", "{chcName}", indents + 1, dump_html_{chcTypeID});
@end if
@if({chcPtrLevel} == 1 and '{chcLength}' != 'None')
dump_html_array<const {chcBaseType}>(object.{chcName}, {chcLength}, settings, "{chcType}", "{chcChildType}", "{chcName}", indents + 1, dump_html_{chcTypeID});
@end if
@end choice
return settings.stream();
}}
@end union
//========================= Function Implementations ========================//
uint64_t next_frame = 0;
@foreach function where('{funcReturn}' != 'void')
std::ostream& dump_html_{funcName}(ApiDumpInstance& dump_inst, {funcReturn} result, {funcTypedParams})
{{
const ApiDumpSettings& settings(dump_inst.settings());
uint64_t current_frame = dump_inst.frameCount();
if (current_frame == next_frame) {{
if (next_frame > 0) {{
settings.stream() << "</details>";
}}
settings.stream() << "<details class='frm'><summary>Frame " << current_frame << "</summary>";
next_frame++;
}}
settings.stream() << "<div class='thd'>Thread " << dump_inst.threadID() << ":</div>";
settings.stream() << "<details class='fn'><summary>";
dump_html_nametype(settings.stream(), settings.showType(), "{funcName}({funcNamedParams})", "{funcReturn}");
dump_html_{funcReturn}(result, settings, 0);
settings.stream() << "</summary>";
if(settings.showParams())
{{
@foreach parameter
@if({prmPtrLevel} == 0)
dump_html_value<const {prmBaseType}>({prmName}, settings, "{prmType}", "{prmName}", 1, dump_html_{prmTypeID}{prmInheritedConditions});
@end if
@if({prmPtrLevel} == 1 and '{prmLength}' == 'None')
dump_html_pointer<const {prmBaseType}>({prmName}, settings, "{prmType}", "{prmName}", 1, dump_html_{prmTypeID}{prmInheritedConditions});
@end if
@if({prmPtrLevel} == 1 and '{prmLength}' != 'None')
dump_html_array<const {prmBaseType}>({prmName}, {prmLength}, settings, "{prmType}", "{prmChildType}", "{prmName}", 1, dump_html_{prmTypeID}{prmInheritedConditions});
@end if
@end parameter
}}
settings.shouldFlush() ? settings.stream() << std::endl : settings.stream() << "\\n";
return settings.stream() << "</details>";
}}
@end function
@foreach function where('{funcReturn}' == 'void')
std::ostream& dump_html_{funcName}(ApiDumpInstance& dump_inst, {funcTypedParams})
{{
const ApiDumpSettings& settings(dump_inst.settings());
uint64_t current_frame = dump_inst.frameCount();
if (current_frame == next_frame) {{
if (next_frame > 0) {{
settings.stream() << "</details>";
}}
settings.stream() << "<details class='frm'><summary>Frame " << current_frame << "</summary>";
next_frame++;
}}
settings.stream() << "<div class='thd'>Thread " << dump_inst.threadID() << ":</div>";
settings.stream() << "<details class='fn'><summary>";
dump_html_nametype(settings.stream(), settings.showType(), "{funcName}({funcNamedParams})", "{funcReturn}");
settings.stream() << "</summary>";
if(settings.showParams())
{{
@foreach parameter
@if({prmPtrLevel} == 0)
dump_html_value<const {prmBaseType}>({prmName}, settings, "{prmType}", "{prmName}", 1, dump_html_{prmTypeID});
@end if
@if({prmPtrLevel} == 1 and '{prmLength}' == 'None')
dump_html_pointer<const {prmBaseType}>({prmName}, settings, "{prmType}", "{prmName}", 1, dump_html_{prmTypeID});
@end if
@if({prmPtrLevel} == 1 and '{prmLength}' != 'None')
dump_html_array<const {prmBaseType}>({prmName}, {prmLength}, settings, "{prmType}", "{prmChildType}", "{prmName}", 1, dump_html_{prmTypeID});
@end if
@end parameter
}}
settings.shouldFlush() ? settings.stream() << std::endl : settings.stream() << "\\n";
return settings.stream() << "</details>";
}}
@end function
"""
POINTER_TYPES = ['void', 'xcb_connection_t', 'Display', 'SECURITY_ATTRIBUTES', 'ANativeWindow']
TRACKED_STATE = {
'vkAllocateCommandBuffers':
'if(result == VK_SUCCESS)\n' +
'ApiDumpInstance::current().addCmdBuffers(\n' +
'device,\n' +
'pAllocateInfo->commandPool,\n' +
'std::vector<VkCommandBuffer>(pCommandBuffers, pCommandBuffers + pAllocateInfo->commandBufferCount),\n' +
'pAllocateInfo->level\n'
');',
'vkDestroyCommandPool':
'ApiDumpInstance::current().eraseCmdBufferPool(device, commandPool);'
,
'vkFreeCommandBuffers':
'ApiDumpInstance::current().eraseCmdBuffers(device, commandPool, std::vector<VkCommandBuffer>(pCommandBuffers, pCommandBuffers + commandBufferCount));'
,
}
INHERITED_STATE = {
'VkPipelineViewportStateCreateInfo': {
'VkGraphicsPipelineCreateInfo': [
{
'name': 'is_dynamic_viewport',
'type': 'bool',
'expr':
'object.pDynamicState && ' +
'std::count(' +
'object.pDynamicState->pDynamicStates, ' +
'object.pDynamicState->pDynamicStates + object.pDynamicState->dynamicStateCount, ' +
'VK_DYNAMIC_STATE_VIEWPORT' +
')',
},
{
'name':'is_dynamic_scissor',
'type': 'bool',
'expr':
'object.pDynamicState && ' +
'std::count(' +
'object.pDynamicState->pDynamicStates, ' +
'object.pDynamicState->pDynamicStates + object.pDynamicState->dynamicStateCount, ' +
'VK_DYNAMIC_STATE_SCISSOR' +
')',
},
],
},
'VkCommandBufferBeginInfo': {
'vkBeginCommandBuffer': [
{
'name': 'cmd_buffer',
'type': 'VkCommandBuffer',
'expr': 'commandBuffer',
},
],
},
}
VALIDITY_CHECKS = {
'VkBufferCreateInfo': {
'pQueueFamilyIndices': 'object.sharingMode == VK_SHARING_MODE_CONCURRENT',
},
'VkCommandBufferBeginInfo': {
# Tracked state ApiDumpInstance, and inherited cmd_buffer
'pInheritanceInfo': 'ApiDumpInstance::current().getCmdBufferLevel(cmd_buffer) == VK_COMMAND_BUFFER_LEVEL_SECONDARY',
},
'VkDescriptorSetLayoutBinding': {
'pImmutableSamplers':
'(object.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) || ' +
'(object.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)',
},
'VkImageCreateInfo': {
'pQueueFamilyIndices': 'object.sharingMode == VK_SHARING_MODE_CONCURRENT',
},
'VkPipelineViewportStateCreateInfo': {
'pViewports': '!is_dynamic_viewport', # Inherited state variable is_dynamic_viewport
'pScissors': '!is_dynamic_scissor', # Inherited state variable is_dynamic_scissor
},
'VkSwapchainCreateInfoKHR': {
'pQueueFamilyIndices': 'object.imageSharingMode == VK_SHARING_MODE_CONCURRENT',
},
'VkWriteDescriptorSet': {
'pImageInfo':
'(object.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) || ' +
'(object.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) || ' +
'(object.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) || ' +
'(object.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)',
'pBufferInfo':
'(object.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) || ' +
'(object.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) || ' +
'(object.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) || ' +
'(object.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC)',
'pTexelBufferView':
'(object.descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) || ' +
'(object.descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)',
},
}
class ApiDumpGeneratorOptions(gen.GeneratorOptions):
def __init__(self,
input = None,
filename = None,
directory = '.',
apiname = None,
profile = None,
versions = '.*',
emitversions = '.*',
defaultExtensions = None,
addExtensions = None,
removeExtensions = None,
sortProcedure = None,
prefixText = "",
genFuncPointers = True,
protectFile = True,
protectFeature = True,
protectProto = None,
protectProtoStr = None,
apicall = '',
apientry = '',
apientryp = '',
indentFuncProto = True,
indentFuncPointer = False,
alignFuncParam = 0):
gen.GeneratorOptions.__init__(self, filename, directory, apiname, profile,
versions, emitversions, defaultExtensions,
addExtensions, removeExtensions, sortProcedure)
self.input = input
self.prefixText = prefixText
self.genFuncPointers = genFuncPointers
self.protectFile = protectFile
self.protectFeature = protectFeature
self.protectProto = protectProto
self.protectProtoStr = protectProtoStr
self.apicall = apicall
self.apientry = apientry
self.apientryp = apientryp
self.indentFuncProto = indentFuncProto
self.indentFuncPointer = indentFuncPointer
self.alignFuncParam = alignFuncParam
class ApiDumpOutputGenerator(gen.OutputGenerator):
def __init__(self,
errFile = sys.stderr,
warnFile = sys.stderr,
diagFile = sys.stdout,
registryFile = None):
gen.OutputGenerator.__init__(self, errFile, warnFile, diagFile)
self.format = None
self.constants = {}
self.extensions = set()
self.extFuncs = {}
self.extTypes = {}
self.includes = {}
self.basetypes = set()
self.bitmasks = set()
self.enums = set()
self.externalTypes = set()
self.flags = set()
self.funcPointers = set()
self.functions = set()
self.handles = set()
self.structs = set()
self.unions = set()
self.registryFile = registryFile
def beginFile(self, genOpts):
gen.OutputGenerator.beginFile(self, genOpts)
self.format = genOpts.input
if self.registryFile != None:
root = xml.etree.ElementTree.parse(self.registryFile)
else:
root = self.registry.reg
for node in root.find('extensions').findall('extension'):
ext = VulkanExtension(node)
self.extensions.add(ext)
for item in ext.vktypes:
self.extTypes[item] = ext
for item in ext.vkfuncs:
self.extFuncs[item] = ext
for node in self.registry.reg.findall('enums'):
if node.get('name') == 'API Constants':
for item in node.findall('enum'):
self.constants[item.get('name')] = item.get('value')
for node in self.registry.reg.find('types').findall('type'):
if node.get('category') == 'include':
self.includes[node.get('name')] = ''.join(node.itertext())
def endFile(self):
# Find all of the extensions that use the system types
self.sysTypes = set()
for node in self.registry.reg.find('types').findall('type'):
if node.get('category') == None and node.get('requires') in self.includes and node.get('requires') != 'vk_platform':
for extension in self.extTypes:
for structName in self.extTypes[extension].vktypes:
for struct in self.structs:
if struct.name == structName:
for member in struct.members:
if node.get('name') == member.baseType or node.get('name') + '*' == member.baseType:
sysType = VulkanSystemType(node.get('name'), self.extTypes[structName])
if sysType not in self.sysTypes:
self.sysTypes.add(sysType)
for funcName in self.extTypes[extension].vkfuncs:
for func in self.functions:
if func.name == funcName:
for param in func.parameters:
if node.get('name') == param.baseType or node.get('name') + '*' == param.baseType:
sysType = VulkanSystemType(node.get('name'), self.extFuncs[funcName])
if sysType not in self.sysTypes:
self.sysTypes.add(sysType)
# Find every @foreach, @if, and @end
forIter = re.finditer('(^\\s*\\@foreach\\s+[a-z]+(\\s+where\\(.*\\))?\\s*^)|(\\@foreach [a-z]+(\\s+where\\(.*\\))?\\b)', self.format, flags=re.MULTILINE)
ifIter = re.finditer('(^\\s*\\@if\\(.*\\)\\s*^)|(\\@if\\(.*\\))', self.format, flags=re.MULTILINE)
endIter = re.finditer('(^\\s*\\@end\\s+[a-z]+\\s*^)|(\\@end [a-z]+\\b)', self.format, flags=re.MULTILINE)
try:
nextFor = next(forIter)
except StopIteration:
nextFor = None
try:
nextIf = next(ifIter)
except StopIteration:
nextIf = None
try:
nextEnd = next(endIter)
except StopIteration:
nextEnd = None
# Match the beginnings to the ends
loops = []
unassignedControls = []
depth = 0
        while nextFor != None or nextIf != None or nextEnd != None:
# If this is a @foreach
if nextFor != None and ((nextIf == None or nextFor.start() < nextIf.start()) and nextFor.start() < nextEnd.start()):
depth += 1
forType = re.search('(?<=\\s)[a-z]+', self.format[nextFor.start():nextFor.end()])
text = self.format[forType.start()+nextFor.start():forType.end()+nextFor.start()]
whereMatch = re.search('(?<=where\\().*(?=\\))', self.format[nextFor.start():nextFor.end()])
condition = None if whereMatch == None else self.format[whereMatch.start()+nextFor.start():whereMatch.end()+nextFor.start()]
unassignedControls.append((nextFor.start(), nextFor.end(), text, condition))
try:
nextFor = next(forIter)
except StopIteration:
nextFor = None
# If this is an @if
elif nextIf != None and nextIf.start() < nextEnd.start():
depth += 1
condMatch = re.search('(?<=if\\().*(?=\\))', self.format[nextIf.start():nextIf.end()])
condition = None if condMatch == None else self.format[condMatch.start()+nextIf.start():condMatch.end()+nextIf.start()]
unassignedControls.append((nextIf.start(), nextIf.end(), 'if', condition))
try:
nextIf = next(ifIter)
except StopIteration:
nextIf = None
# Else this is an @end
else:
depth -= 1
endType = re.search('(?<=\\s)[a-z]+', self.format[nextEnd.start():nextEnd.end()])
text = self.format[endType.start()+nextEnd.start():endType.end()+nextEnd.start()]
start = unassignedControls.pop(-1)
assert(start[2] == text)
item = Control(self.format, start[0:2], (nextEnd.start(), nextEnd.end()), text, start[3])
if len(loops) < 1 or depth < loops[-1][0]:
while len(loops) > 0 and depth < loops[-1][0]:
item.children.insert(0, loops.pop(-1)[1])
loops.append((depth, item))
else:
loops.append((depth, item))
try:
nextEnd = next(endIter)
except StopIteration:
nextEnd = None
# Expand each loop into its full form
lastIndex = 0
for _, loop in loops:
gen.write(self.format[lastIndex:loop.startPos[0]].format(**{}), file=self.outFile)
gen.write(self.expand(loop), file=self.outFile)
lastIndex = loop.endPos[1]
gen.write(self.format[lastIndex:-1].format(**{}), file=self.outFile)
gen.OutputGenerator.endFile(self)
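    # Illustration of the template dialect parsed above (this snippet is hypothetical, not one of the
    # shipped api_dump templates): @foreach iterates a collected set, @if/where() conditions are
    # evaluated after substitution, and {placeholders} come from the matching values() dictionaries.
    #
    #     @foreach function where('{funcName}' != 'vkGetInstanceProcAddr')
    #     {funcReturn} {funcName}({funcTypedParams});
    #     @end function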
def genCmd(self, cmd, name):
gen.OutputGenerator.genCmd(self, cmd, name)
self.functions.add(VulkanFunction(cmd.elem, self.constants))
# These are actually constants
def genEnum(self, enuminfo, name):
gen.OutputGenerator.genEnum(self, enuminfo, name)
# These are actually enums
def genGroup(self, groupinfo, groupName):
gen.OutputGenerator.genGroup(self, groupinfo, groupName)
if groupinfo.elem.get('type') == 'bitmask':
self.bitmasks.add(VulkanBitmask(groupinfo.elem, self.extensions))
elif groupinfo.elem.get('type') == 'enum':
self.enums.add(VulkanEnum(groupinfo.elem, self.extensions))
def genType(self, typeinfo, name):
gen.OutputGenerator.genType(self, typeinfo, name)
if typeinfo.elem.get('category') == 'struct':
self.structs.add(VulkanStruct(typeinfo.elem, self.constants))
elif typeinfo.elem.get('category') == 'basetype':
self.basetypes.add(VulkanBasetype(typeinfo.elem))
elif typeinfo.elem.get('category') == None and typeinfo.elem.get('requires') == 'vk_platform':
self.externalTypes.add(VulkanExternalType(typeinfo.elem))
elif typeinfo.elem.get('category') == 'handle':
self.handles.add(VulkanHandle(typeinfo.elem))
elif typeinfo.elem.get('category') == 'union':
self.unions.add(VulkanUnion(typeinfo.elem, self.constants))
elif typeinfo.elem.get('category') == 'bitmask':
self.flags.add(VulkanFlags(typeinfo.elem))
elif typeinfo.elem.get('category') == 'funcpointer':
self.funcPointers.add(VulkanFunctionPointer(typeinfo.elem))
def expand(self, loop, parents=[]):
# Figure out what we're dealing with
if loop.text == 'if':
subjects = [ Control.IfDummy() ]
elif loop.text == 'basetype':
subjects = self.basetypes
elif loop.text == 'bitmask':
subjects = self.bitmasks
elif loop.text == 'choice':
subjects = self.findByType([VulkanUnion], parents).choices
elif loop.text == 'enum':
subjects = self.enums
elif loop.text == 'extension':
subjects = self.extensions
elif loop.text == 'flag':
subjects = self.flags
elif loop.text == 'funcpointer':
subjects = self.funcPointers
elif loop.text == 'function':
subjects = self.functions
elif loop.text == 'handle':
subjects = self.handles
elif loop.text == 'option':
subjects = self.findByType([VulkanEnum, VulkanBitmask], parents).options
elif loop.text == 'member':
subjects = self.findByType([VulkanStruct], parents).members
elif loop.text == 'parameter':
subjects = self.findByType([VulkanFunction], parents).parameters
elif loop.text == 'struct':
subjects = self.structs
elif loop.text == 'systype':
subjects = self.sysTypes
elif loop.text == 'type':
subjects = self.externalTypes
elif loop.text == 'union':
subjects = self.unions
else:
assert(False)
# Generate the output string
out = ''
for item in subjects:
# Merge the values and the parent values
values = item.values().copy()
for parent in parents:
values.update(parent.values())
# Check if the condition is met
if loop.condition != None:
cond = eval(loop.condition.format(**values))
assert(cond == True or cond == False)
if not cond:
continue
# Check if an ifdef is needed
if item.name in self.extFuncs:
ext = self.extFuncs[item.name]
elif item.name in self.extTypes:
ext = self.extTypes[item.name]
elif item in self.sysTypes:
ext = item.ext
else:
ext = None
if ext != None and ext.guard != None:
out += '#if defined({})\n'.format(ext.guard)
# Format the string
lastIndex = loop.startPos[1]
for child in loop.children:
out += loop.fullString[lastIndex:child.startPos[0]].format(**values)
out += self.expand(child, parents=[item]+parents)
lastIndex = child.endPos[1]
out += loop.fullString[lastIndex:loop.endPos[0]].format(**values)
# Close the ifdef
if ext != None and ext.guard != None:
out += '#endif // {}\n'.format(ext.guard)
return out
def findByType(self, types, objects):
value = None
for item in objects:
for ty in types:
if isinstance(item, ty):
value = item
break
assert(value != None)
return value
class Control:
class IfDummy:
def __init__(self):
self.name = 'ifdummy'
def values(self):
return {}
def __init__(self, fullString, start, end, text, condition):
self.fullString = fullString
self.startPos = start
self.endPos = end
self.text = text
self.condition = condition
self.children = []
# Base class for VulkanStruct.Member and VulkanStruct.Parameter
class VulkanVariable:
def __init__(self, rootNode, constants, parentName):
# Set basic properties
self.name = rootNode.find('name').text # Variable name
self.typeID = rootNode.find('type').text # Typename, dereferenced and converted to a useable C++ token
self.baseType = self.typeID # Type, dereferenced to the non-pointer type
self.childType = None # Type, dereferenced to the non-pointer type (None if it isn't a pointer)
self.arrayLength = None # Length of the array, or None if it isn't an array
# Get the text of the variable type and name, but not the comment
self.text = ''
for node in rootNode.itertext():
comment = rootNode.find('comment')
if comment != None and comment.text == node:
continue
self.text += node
typeMatch = re.search('.+?(?=' + self.name + ')', self.text)
self.type = typeMatch.string[typeMatch.start():typeMatch.end()]
self.type = ' '.join(self.type.split())
bracketMatch = re.search('(?<=\\[)[a-zA-Z0-9_]+(?=\\])', self.text)
if bracketMatch != None:
matchText = bracketMatch.string[bracketMatch.start():bracketMatch.end()]
self.childType = self.type
self.type += '[' + matchText + ']'
if matchText in constants:
self.arrayLength = constants[matchText]
else:
self.arrayLength = matchText
self.lengthMember = False
lengthString = rootNode.get('len')
lengths = []
if lengthString != None:
lengths = re.split(',', lengthString)
lengths = list(filter(('null-terminated').__ne__, lengths))
assert(len(lengths) <= 1)
if self.arrayLength == None and len(lengths) > 0:
self.childType = '*'.join(self.type.split('*')[0:-1])
self.arrayLength = lengths[0]
self.lengthMember = True
if self.arrayLength != None and self.arrayLength.startswith('latexmath'):
code = self.arrayLength[10:len(self.arrayLength)]
code = re.sub('\\[', '', code)
code = re.sub('\\]', '', code)
code = re.sub('\\\\(lceil|rceil)', '', code)
code = re.sub('{|}', '', code)
code = re.sub('\\\\mathit', '', code)
code = re.sub('\\\\over', '/', code)
self.arrayLength = code
# Dereference if necessary and handle members of variables
if self.arrayLength != None:
self.arrayLength = re.sub('::', '->', self.arrayLength)
sections = self.arrayLength.split('->')
if sections[-1][0] == 'p' and sections[0][1].isupper():
self.arrayLength = '*' + self.arrayLength
self.pointerLevels = len(re.findall('\\*|\\[', self.text))
if self.typeID == 'char' and self.pointerLevels > 0:
self.baseType += '*'
self.pointerLevels -= 1
self.typeID = 'cstring'
elif self.typeID in POINTER_TYPES:
self.baseType += '*'
self.pointerLevels -= 1
assert(self.pointerLevels >= 0)
self.inheritedConditions = ''
if self.typeID in INHERITED_STATE and parentName in INHERITED_STATE[self.typeID]:
for states in INHERITED_STATE[self.typeID][parentName]:
self.inheritedConditions += ', ' + states['expr']
class VulkanBasetype:
def __init__(self, rootNode):
self.name = rootNode.get('name')
self.type = rootNode.get('type')
def values(self):
return {
'baseName': self.name,
'baseType': self.type,
}
class VulkanBitmask:
def __init__(self, rootNode, extensions):
self.name = rootNode.get('name')
self.type = rootNode.get('type')
        # Read each value that the bitmask contains
self.options = []
for child in rootNode:
childName = child.get('name')
childValue = child.get('value')
childBitpos = child.get('bitpos')
childComment = child.get('comment')
if childName == None or (childValue == None and childBitpos == None):
continue
self.options.append(VulkanEnum.Option(childName, childValue, childBitpos, childComment))
for ext in extensions:
if self.name in ext.enumValues:
childName, childValue = ext.enumValues[self.name]
self.options.append(VulkanEnum.Option(childName, childValue, None, None))
def values(self):
return {
'bitName': self.name,
'bitType': self.type,
}
class VulkanEnum:
class Option:
def __init__(self, name, value, bitpos, comment):
self.name = name
self.comment = comment
if value == 0 or value == None:
value = 1 << int(bitpos)
self.value = value
def values(self):
return {
'optName': self.name,
'optValue': self.value,
'optComment': self.comment,
}
def __init__(self, rootNode, extensions):
self.name = rootNode.get('name')
self.type = rootNode.get('type')
# Read each value that the enum contains
self.options = []
for child in rootNode:
childName = child.get('name')
childValue = child.get('value')
childBitpos = child.get('bitpos')
childComment = child.get('comment')
if childName == None or (childValue == None and childBitpos == None):
continue
self.options.append(VulkanEnum.Option(childName, childValue, childBitpos, childComment))
for ext in extensions:
if self.name in ext.enumValues:
childName, childValue = ext.enumValues[self.name]
self.options.append(VulkanEnum.Option(childName, childValue, None, None))
def values(self):
return {
'enumName': self.name,
'enumType': self.type,
}
class VulkanExtension:
def __init__(self, rootNode):
self.name = rootNode.get('name')
self.number = int(rootNode.get('number'))
self.type = rootNode.get('type')
self.dependency = rootNode.get('requires')
self.guard = rootNode.get('protect')
self.supported = rootNode.get('supported')
self.vktypes = []
for ty in rootNode.find('require').findall('type'):
self.vktypes.append(ty.get('name'))
self.vkfuncs = []
for func in rootNode.find('require').findall('command'):
self.vkfuncs.append(func.get('name'))
self.constants = {}
self.enumValues = {}
for enum in rootNode.find('require').findall('enum'):
base = enum.get('extends')
name = enum.get('name')
value = enum.get('value')
bitpos = enum.get('bitpos')
offset = enum.get('offset')
if value == None and bitpos != None:
value = 1 << int(bitpos)
if offset != None:
offset = int(offset)
if base != None and offset != None:
enumValue = 1000000000 + 1000*(self.number - 1) + offset
if enum.get('dir') == '-':
                    enumValue = -enumValue
self.enumValues[base] = (name, enumValue)
else:
self.constants[name] = value
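    # The offset-based values computed in __init__ follow the registry's extension numbering scheme:
    # base 1000000000 plus a 1000-wide block per extension. For example, extension number 2 with
    # offset 3 yields 1000000000 + 1000*(2 - 1) + 3 = 1000001003.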
def values(self):
return {
'extName': self.name,
'extNumber': self.number,
'extType': self.type,
'extDependency': self.dependency,
'extGuard': self.guard,
'extSupported': self.supported,
}
class VulkanExternalType:
def __init__(self, rootNode):
self.name = rootNode.get('name')
self.dependency = rootNode.get('requires')
def values(self):
return {
'etyName': self.name,
'etyDependency': self.dependency,
}
class VulkanFlags:
def __init__(self, rootNode):
self.name = rootNode.get('name')
self.type = rootNode.get('type')
self.enum = rootNode.get('requires')
def values(self):
return {
'flagName': self.name,
'flagType': self.type,
'flagEnum': self.enum,
}
class VulkanFunction:
class Parameter(VulkanVariable):
def __init__(self, rootNode, constants, parentName):
VulkanVariable.__init__(self, rootNode, constants, parentName)
def values(self):
return {
'prmName': self.name,
'prmBaseType': self.baseType,
'prmTypeID': self.typeID,
'prmType': self.type,
'prmChildType': self.childType,
'prmPtrLevel': self.pointerLevels,
'prmLength': self.arrayLength,
'prmInheritedConditions': self.inheritedConditions,
}
def __init__(self, rootNode, constants):
self.name = rootNode.find('proto').find('name').text
self.returnType = rootNode.find('proto').find('type').text
self.parameters = []
self.namedParams = ''
self.typedParams = ''
for node in rootNode.findall('param'):
self.parameters.append(VulkanFunction.Parameter(node, constants, self.name))
self.namedParams += self.parameters[-1].name + ', '
self.typedParams += self.parameters[-1].text + ', '
if len(self.parameters) > 0:
self.namedParams = self.namedParams[0:-2]
self.typedParams = self.typedParams[0:-2]
if self.parameters[0].type in ['VkInstance', 'VkPhysicalDevice'] or self.name == 'vkCreateInstance':
self.type = 'instance'
else:
self.type = 'device'
self.stateTrackingCode = ''
if self.name in TRACKED_STATE:
self.stateTrackingCode = TRACKED_STATE[self.name]
def values(self):
return {
'funcName': self.name,
'funcShortName': self.name[2:len(self.name)],
'funcType': self.type,
'funcReturn': self.returnType,
'funcNamedParams': self.namedParams,
'funcTypedParams': self.typedParams,
'funcDispatchParam': self.parameters[0].name,
'funcStateTrackingCode': self.stateTrackingCode
}
class VulkanFunctionPointer:
def __init__(self, rootNode):
self.name = rootNode.get('name')
def values(self):
return {
'pfnName': self.name,
}
class VulkanHandle:
def __init__(self, rootNode):
self.name = rootNode.get('name')
self.type = rootNode.get('type')
self.parent = rootNode.get('parent')
def values(self):
return {
'hdlName': self.name,
'hdlType': self.type,
'hdlParent': self.parent,
}
class VulkanStruct:
class Member(VulkanVariable):
def __init__(self, rootNode, constants, parentName):
VulkanVariable.__init__(self, rootNode, constants, parentName)
# Search for a member condition
self.condition = None
if rootNode.get('noautovalidity') == 'true' and parentName in VALIDITY_CHECKS and self.name in VALIDITY_CHECKS[parentName]:
self.condition = VALIDITY_CHECKS[parentName][self.name]
def values(self):
return {
'memName': self.name,
'memBaseType': self.baseType,
'memTypeID': self.typeID,
'memType': self.type,
'memChildType': self.childType,
'memPtrLevel': self.pointerLevels,
'memLength': self.arrayLength,
'memLengthIsMember': self.lengthMember,
'memCondition': self.condition,
'memInheritedConditions': self.inheritedConditions,
}
def __init__(self, rootNode, constants):
self.name = rootNode.get('name')
self.members = []
for node in rootNode.findall('member'):
self.members.append(VulkanStruct.Member(node, constants, self.name))
self.conditionVars = ''
if self.name in INHERITED_STATE:
for parent, states in INHERITED_STATE[self.name].items():
for state in states:
                    self.conditionVars += ', ' + state['type'] + ' ' + state['name']
def values(self):
return {
'sctName': self.name,
'sctConditionVars': self.conditionVars,
}
class VulkanSystemType:
def __init__(self, name, ext):
self.name = name
self.type = self.name if name not in POINTER_TYPES else self.name + '*'
self.ext = ext
def __eq__(self, that):
return self.name == that.name and self.type == that.type
def __hash__(self):
return hash(self.name) | hash(self.type)
def values(self):
return {
'sysName': self.name,
'sysType': self.type,
}
class VulkanUnion:
class Choice(VulkanVariable):
def __init__(self, rootNode, constants, parentName):
VulkanVariable.__init__(self, rootNode, constants, parentName)
def values(self):
return {
'chcName': self.name,
'chcBaseType': self.baseType,
'chcTypeID': self.typeID,
'chcType': self.type,
'chcChildType': self.childType,
'chcPtrLevel': self.pointerLevels,
'chcLength': self.arrayLength,
#'chcLengthIsMember': self.lengthMember,
}
def __init__(self, rootNode, constants):
self.name = rootNode.get('name')
self.choices = []
for node in rootNode.findall('member'):
self.choices.append(VulkanUnion.Choice(node, constants, self.name))
def values(self):
return {
'unName': self.name,
}
| 39.065457
| 265
| 0.609932
|
76eefd0373ae912c8ca1b87c30540d93958574ae
| 78
|
py
|
Python
|
pcc_schedule/main.py
|
jaxlin12/PCC_Schedule_Spider
|
284690517c5687ccac0516043370862492e2cffe
|
[
"MIT"
] | 2
|
2021-01-06T21:57:29.000Z
|
2021-01-15T00:30:46.000Z
|
pcc_schedule/main.py
|
JaxLam/PCC_Schedule_Spider
|
284690517c5687ccac0516043370862492e2cffe
|
[
"MIT"
] | null | null | null |
pcc_schedule/main.py
|
JaxLam/PCC_Schedule_Spider
|
284690517c5687ccac0516043370862492e2cffe
|
[
"MIT"
] | null | null | null |
from scrapy import cmdline
cmdline.execute("scrapy crawl schedule".split())
| 26
| 49
| 0.782051
|
7b5a2fc90db47b60d608f0856814d0b735132d1a
| 292
|
py
|
Python
|
setup.py
|
nylas/stackcollector
|
2e9f72ee74587e0dea5ba4826cd60a093c8869f0
|
[
"MIT"
] | 592
|
2015-09-29T18:08:07.000Z
|
2022-03-14T04:18:15.000Z
|
setup.py
|
nylas/stackcollector
|
2e9f72ee74587e0dea5ba4826cd60a093c8869f0
|
[
"MIT"
] | 16
|
2015-10-29T15:55:17.000Z
|
2021-06-15T18:26:24.000Z
|
setup.py
|
nylas/stackcollector
|
2e9f72ee74587e0dea5ba4826cd60a093c8869f0
|
[
"MIT"
] | 62
|
2015-09-29T18:07:35.000Z
|
2021-08-06T09:11:00.000Z
|
from setuptools import setup, find_packages
setup(
name='stackcollector',
version='0.1',
packages=find_packages(),
install_requires=[
'requests>=2.4.3',
'flask>=0.10.1',
'nylas-production-python>=0.2.3',
'click',
'dateparser'
],
)
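# Typical local usage (an assumption, not documented in this file): running `pip install .` from the
# repository root installs the package together with the requirements pinned above.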
| 19.466667
| 43
| 0.568493
|
7f701961654ef4e5d1a62f6f85fb5a4339d084a8
| 2,094
|
py
|
Python
|
tests/gold_tests/body_factory/http204_response_plugin.test.py
|
AD5GB/trafficserver
|
b18b54ba5033da8de0983a07626dd24501474c93
|
[
"Apache-2.0"
] | null | null | null |
tests/gold_tests/body_factory/http204_response_plugin.test.py
|
AD5GB/trafficserver
|
b18b54ba5033da8de0983a07626dd24501474c93
|
[
"Apache-2.0"
] | null | null | null |
tests/gold_tests/body_factory/http204_response_plugin.test.py
|
AD5GB/trafficserver
|
b18b54ba5033da8de0983a07626dd24501474c93
|
[
"Apache-2.0"
] | 1
|
2021-08-28T09:50:10.000Z
|
2021-08-28T09:50:10.000Z
|
'''
Tests that plugins may break HTTP by sending 204 response bodies
'''
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
Test.Summary = '''
Tests that plugins may break HTTP by sending 204 response bodies
'''
ts = Test.MakeATSProcess("ts")
server = Test.MakeOriginServer("server")
CUSTOM_PLUGIN_204_HOST = 'www.customplugin204.test'
regex_remap_conf_file = "maps.reg"
ts.Disk.remap_config.AddLine(
f'map http://{CUSTOM_PLUGIN_204_HOST} http://127.0.0.1:{server.Variables.Port} @plugin=regex_remap.so @pparam={regex_remap_conf_file} @pparam=no-query-string @pparam=host'
)
ts.Disk.MakeConfigFile(regex_remap_conf_file).AddLine('//.*/ http://donotcare.test @status=204')
Test.PrepareTestPlugin(os.path.join(Test.Variables.AtsTestPluginsDir, 'custom204plugin.so'), ts)
Test.Setup.Copy(os.path.join(os.pardir, os.pardir, 'tools', 'tcp_client.py'))
Test.Setup.Copy('data')
tr = Test.AddTestRun("Test domain {0}".format(CUSTOM_PLUGIN_204_HOST))
tr.Processes.Default.StartBefore(Test.Processes.ts)
tr.StillRunningAfter = ts
tr.Processes.Default.Command = f"{sys.executable} tcp_client.py 127.0.0.1 {ts.Variables.port} data/{CUSTOM_PLUGIN_204_HOST}_get.test_input"
tr.Processes.Default.TimeOut = 5 # seconds
tr.Processes.Default.ReturnCode = 0
tr.Processes.Default.Streams.stdout = "gold/http-204-custom-plugin.gold"
| 40.269231
| 175
| 0.767431
|
b77ad3b48d96f6a6643eb7ee93fb4afafd3e690f
| 1,931
|
py
|
Python
|
test/test_workflow_decision_case.py
|
sdnit-se/intersight-python
|
551f7685c0f76bb8af60ec83ffb6f9672d49a4ae
|
[
"Apache-2.0"
] | 21
|
2018-03-29T14:20:35.000Z
|
2021-10-13T05:11:41.000Z
|
test/test_workflow_decision_case.py
|
sdnit-se/intersight-python
|
551f7685c0f76bb8af60ec83ffb6f9672d49a4ae
|
[
"Apache-2.0"
] | 14
|
2018-01-30T15:45:46.000Z
|
2022-02-23T14:23:21.000Z
|
test/test_workflow_decision_case.py
|
sdnit-se/intersight-python
|
551f7685c0f76bb8af60ec83ffb6f9672d49a4ae
|
[
"Apache-2.0"
] | 18
|
2018-01-03T15:09:56.000Z
|
2021-07-16T02:21:54.000Z
|
# coding: utf-8
"""
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. # noqa: E501
The version of the OpenAPI document: 1.0.9-1295
Contact: intersight@cisco.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import intersight
from intersight.models.workflow_decision_case import WorkflowDecisionCase # noqa: E501
from intersight.rest import ApiException
class TestWorkflowDecisionCase(unittest.TestCase):
"""WorkflowDecisionCase unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testWorkflowDecisionCase(self):
"""Test WorkflowDecisionCase"""
# FIXME: construct object with mandatory attributes with example values
# model = intersight.models.workflow_decision_case.WorkflowDecisionCase() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 50.815789
| 1,052
| 0.781978
|
21f0a1815ebe17095a89df9f4a772d36c49fc41b
| 5,539
|
py
|
Python
|
III-mono_neural_inference/as_nn_coronamask_depth.py
|
Ikomia-dev/ikomia-oakd
|
7a048dd82fefc56a9f28b889f7d988d931173c9a
|
[
"MIT"
] | null | null | null |
III-mono_neural_inference/as_nn_coronamask_depth.py
|
Ikomia-dev/ikomia-oakd
|
7a048dd82fefc56a9f28b889f7d988d931173c9a
|
[
"MIT"
] | null | null | null |
III-mono_neural_inference/as_nn_coronamask_depth.py
|
Ikomia-dev/ikomia-oakd
|
7a048dd82fefc56a9f28b889f7d988d931173c9a
|
[
"MIT"
] | null | null | null |
import depthai as dai
import time
import cv2
from pathlib import Path
# Draw the ROI and class label for each detection with confidence above 50%
def frame_process(frame, tensor):
color = (255,0,0)
keeped_roi = []
    for i in range(100): # There are 100 detections; not all of them are relevant
        if tensor[i*7 + 2] > 0.5: # the 3rd value of each detection is the confidence
keeped_roi.append(tensor[i*7:i*7+7])
spatial_calculator_config = dai.SpatialLocationCalculatorConfig()
for id, label, confidence, left, top, right, bottom in keeped_roi:
topleft = (int(left*frame_width), int(top*frame_height))
bottomright = (int(right*frame_width), int(bottom*frame_height))
cv2.rectangle(frame, topleft, bottomright, color, 2) # ROI
cv2.putText(frame, labels[int(label)] + f" {int(confidence * 100)}%", (topleft[0] + 10, topleft[1] + 20), cv2.FONT_HERSHEY_TRIPLEX, 0.5, color) # Label and confidence
# Add ROIs to spatial location calculator config
spatial_config_data = dai.SpatialLocationCalculatorConfigData()
spatial_config_data.depthThresholds.lowerThreshold = 250
spatial_config_data.depthThresholds.upperThreshold = 5000
spatial_config_data.roi = dai.Rect(dai.Point2f(topleft[0], topleft[1]), dai.Point2f(bottomright[0], bottomright[1]))
spatial_calculator_config.addROI(spatial_config_data)
# Put spatial location info inside of the ROI
    if len(keeped_roi) > 0:
spatial_config_input_queue.send(spatial_calculator_config)
spatial_data = spatial_calculator_queue.get().getSpatialLocations()
for depth_data in spatial_data:
cv2.putText(frame, f"X: {int(depth_data.spatialCoordinates.x)} mm", (topleft[0] + 10, topleft[1] + 50), cv2.FONT_HERSHEY_TRIPLEX, 0.5, color)
cv2.putText(frame, f"Y: {int(depth_data.spatialCoordinates.y)} mm", (topleft[0] + 10, topleft[1] + 65), cv2.FONT_HERSHEY_TRIPLEX, 0.5, color)
cv2.putText(frame, f"Z: {int(depth_data.spatialCoordinates.z)} mm", (topleft[0] + 10, topleft[1] + 80), cv2.FONT_HERSHEY_TRIPLEX, 0.5, color)
return frame
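# Layout note (illustrative, the numbers below are made up): the detection tensor decoded above is a
# flat list of 100 * 7 floats, one 7-value row per detection:
#   [id, label, confidence, left, top, right, bottom]
# An equivalent vectorised filter, assuming `import numpy as np`, would be:
#   detections = np.array(tensor).reshape(-1, 7)      # shape (100, 7)
#   confident  = detections[detections[:, 2] > 0.5]   # same 50% confidence cut as frame_process()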
# Define program parameters
nn_path = str(Path(__file__).parent) + "/../_models/coronamask.blob" # path to the neural network compiled model (.blob)
labels = ["background", "no mask", "mask", "no mask"]
pipeline = dai.Pipeline()
frame_width = 300
frame_height = 300
fps_limit = 20
# Configure spatial location calculator
spatial_location_calculator = pipeline.createSpatialLocationCalculator()
spatial_location_calculator.setWaitForConfigInput(True)
# Prepare depth handling
depth = pipeline.createStereoDepth()
depth.setConfidenceThreshold(255)
depth.depth.link(spatial_location_calculator.inputDepth)
# Set spatial location calculator input/output stream
spatial_data_output_stream = pipeline.createXLinkOut()
spatial_data_output_stream.setStreamName("spatialData")
spatial_location_calculator.out.link(spatial_data_output_stream.input)
spatial_config_input_stream = pipeline.createXLinkIn()
spatial_config_input_stream.setStreamName("spatialCalcConfig")
spatial_config_input_stream.out.link(spatial_location_calculator.inputConfig)
# Set rgb camera source
cam_rgb = pipeline.createColorCamera()
cam_rgb.setPreviewSize(frame_width, frame_height)
cam_rgb.setInterleaved(False)
cam_rgb.setFps(fps_limit)
# Set depth source
left = pipeline.createMonoCamera()
left.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
left.setBoardSocket(dai.CameraBoardSocket.LEFT)
right = pipeline.createMonoCamera()
right.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
right.setBoardSocket(dai.CameraBoardSocket.RIGHT)
# Configure neural network settings
nn = pipeline.createNeuralNetwork()
nn.setBlobPath(nn_path)
cam_rgb.preview.link(nn.input) # link cam_rgb to nn input layer
# Set rgb output stream
rgb_output_stream = pipeline.createXLinkOut()
rgb_output_stream.setStreamName("rgb")
nn.passthrough.link(rgb_output_stream.input)
# Set depth output stream
left.out.link(depth.left)
right.out.link(depth.right)
# Set neural network output stream
nn_output_stream = pipeline.createXLinkOut()
nn_output_stream.setStreamName("nn")
nn.out.link(nn_output_stream.input)
with dai.Device(pipeline) as device:
spatial_config_input_queue = device.getInputQueue("spatialCalcConfig")
rgb_queue = device.getOutputQueue(name="rgb", maxSize=4, blocking=False)
nn_queue = device.getOutputQueue(name="nn", maxSize=4, blocking=False)
spatial_calculator_queue = device.getOutputQueue(name="spatialData", maxSize=4, blocking=False)
frame = None
    startTime = time.monotonic() # used to compute the displayed FPS
counter = 0
while True:
rgb_current_output = rgb_queue.get()
nn_current_output = nn_queue.get()
if rgb_current_output is not None:
frame = rgb_current_output.getCvFrame()
cv2.putText(frame, "NN fps: {:.2f}".format(counter / (time.monotonic() - startTime)), (2, frame.shape[0] - 4), cv2.FONT_HERSHEY_TRIPLEX, 0.4, color=(255, 255, 255))
# Process the data thanks to the NNData object
if nn_current_output is not None:
tensor = nn_current_output.getLayerFp16("DetectionOutput") # Get detection tensor (output layer "DetectionOutput" with this model)
frame_process(frame, tensor)
counter += 1
if frame is not None:
cv2.imshow("output", frame)
if cv2.waitKey(1) == ord('q'):
break
| 41.02963
| 176
| 0.737137
|
380a6936a8d7733f3a4e5ddb570c5eb6a7a09d0c
| 123
|
py
|
Python
|
src/clilib/decorator/__init__.py
|
markediez/clilib
|
46753645b96b6482e38c14fc707a300e10faf330
|
[
"MIT"
] | null | null | null |
src/clilib/decorator/__init__.py
|
markediez/clilib
|
46753645b96b6482e38c14fc707a300e10faf330
|
[
"MIT"
] | 5
|
2020-08-27T17:40:29.000Z
|
2020-09-20T19:09:20.000Z
|
src/clilib/decorator/__init__.py
|
markediez/clilib
|
46753645b96b6482e38c14fc707a300e10faf330
|
[
"MIT"
] | null | null | null |
from clilib.decorator.arg import arg
from clilib.decorator.verb import verb
from clilib.decorator.resource import resource
| 30.75
| 46
| 0.853659
|
9bfe1e7c2f1f981767df5f5a275f0c644cbc47d6
| 1,900
|
py
|
Python
|
utils/helpers/dc_helper.py
|
AllenZYJ/torchcv
|
79f1a0a3b77820edbc42b79e0aa90dbf10d936c0
|
[
"Apache-2.0"
] | 1
|
2019-06-19T23:36:11.000Z
|
2019-06-19T23:36:11.000Z
|
utils/helpers/dc_helper.py
|
AllenZYJ/torchcv
|
79f1a0a3b77820edbc42b79e0aa90dbf10d936c0
|
[
"Apache-2.0"
] | null | null | null |
utils/helpers/dc_helper.py
|
AllenZYJ/torchcv
|
79f1a0a3b77820edbc42b79e0aa90dbf10d936c0
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author: Donny You(youansheng@gmail.com)
import torch
import itertools
from extensions.tools.parallel import DataContainer
class DCHelper(object):
@staticmethod
def tolist(dc):
if isinstance(dc, (list, tuple)):
return dc
assert isinstance(dc, DataContainer), type(dc)
if dc.samples_per_gpu and not dc.stack:
return list(itertools.chain(*dc.data))
elif dc.samples_per_gpu and dc.stack:
return list(itertools.chain(*[[item for item in sub_batch] for sub_batch in dc.data]))
else:
return dc.data
@staticmethod
def totensor(dc):
assert isinstance(dc, DataContainer), type(dc)
if isinstance(dc.data, torch.Tensor):
return dc.data
assert isinstance(dc.data, (list, tuple)) and isinstance(dc.data[0], torch.Tensor)
return torch.cat(dc.data, 0)
@staticmethod
def todc(data_list, samples_per_gpu=True, stack=False, cpu_only=False, device_ids=None):
if not samples_per_gpu:
if not stack:
return DataContainer(data_list, stack=stack, samples_per_gpu=samples_per_gpu, cpu_only=cpu_only)
else:
return DataContainer(torch.stack(data_list, 0), stack=stack, samples_per_gpu=samples_per_gpu, cpu_only=cpu_only)
device_ids = list(range(torch.cuda.device_count())) if device_ids is None else device_ids
samples = (len(data_list) - 1 + len(device_ids)) // len(device_ids)
stacked = []
for i in range(0, len(data_list), samples):
if not stack:
stacked.append(data_list[i:i + samples])
else:
stacked.append(torch.stack(data_list[i:i + samples], 0))
return DataContainer(stacked, stack=stack, samples_per_gpu=samples_per_gpu, cpu_only=cpu_only)
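# Hedged usage sketch (not part of the original module): splitting a batch of tensors across GPUs.
# With 8 tensors and device_ids=[0, 1, 2, 3], samples = (8 - 1 + 4) // 4 = 2, so todc() returns a
# DataContainer holding four stacked chunks of two tensors each:
#
#     dc = DCHelper.todc([torch.randn(3, 32, 32) for _ in range(8)],
#                        samples_per_gpu=True, stack=True, device_ids=[0, 1, 2, 3])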
| 35.185185
| 129
| 0.643158
|
7b25631965f16574de3dcc3ec3d769a184d0697c
| 6,767
|
py
|
Python
|
releasefiles/reports2/create-firewall-data.py
|
sonatype-nexus-community/iq-success-metrics2
|
5c61671305b618e838fbb1af22157062bfdc5a2e
|
[
"Apache-2.0"
] | 8
|
2020-10-10T03:55:16.000Z
|
2022-01-28T14:40:36.000Z
|
releasefiles/reports2/create-firewall-data.py
|
sonatype-nexus-community/iq-success-metrics2
|
5c61671305b618e838fbb1af22157062bfdc5a2e
|
[
"Apache-2.0"
] | 14
|
2020-05-28T19:32:33.000Z
|
2021-11-10T18:21:58.000Z
|
releasefiles/reports2/create-firewall-data.py
|
sonatype-nexus-community/iq-success-metrics2
|
5c61671305b618e838fbb1af22157062bfdc5a2e
|
[
"Apache-2.0"
] | 10
|
2020-08-20T14:17:45.000Z
|
2022-01-23T15:32:00.000Z
|
import json
import requests
import os
import os.path
import sys
import shutil
import csv
debug = False
iqurl = sys.argv[1]
iquser = sys.argv[2]
iqpwd = sys.argv[3]
if len(sys.argv) == 5:
if sys.argv[4] == "debug":
debug = True
jsonfile = 'firewalldata.json'
csvfile = 'firewalldata.csv'
firewall_api = "api/experimental/firewall"
quarantine_datadir = "./quarantine_data"
if os.path.exists(quarantine_datadir):
shutil.rmtree(quarantine_datadir)
os.mkdir(quarantine_datadir)
def get_nexusiq_data(end_point):
    url = "{}/{}/{}".format(iqurl, firewall_api, end_point)
req = requests.get(url, auth=(iquser, iqpwd), verify=False)
if req.status_code == 200:
res = req.json()
else:
res = "Error fetching data"
return res
def print_json(json_data, json_file):
output_file = "{}/{}{}".format(quarantine_datadir, json_file, ".json")
json_formatted = json.dumps(json_data, indent=2)
print(json_formatted)
with open(output_file, 'w') as outfile:
json.dump(json_data, outfile, indent=2)
return
def summary_report(report_name, end_point):
data = get_nexusiq_data(end_point)
print_json(data, report_name)
csv_file = "{}/{}{}".format(quarantine_datadir, report_name, ".csv")
with open(csv_file, 'w') as fd:
writer = csv.writer(fd, delimiter=",")
# print header
line = []
for key in data.keys():
line.append(key)
writer.writerow(line)
# print data
line = []
for value in data.values():
line.append(value)
writer.writerow(line)
return
def list_report(report_name, end_point):
page = 1
page_size = 250
page_count = page_query(end_point, page, page_size, report_name)
if page_count > 0:
        while page < page_count:  # pages 2..page_count; page 1 was fetched above
page += 1
page_query(end_point, page, page_size, report_name)
return
def page_query(end_point, page, page_size, report_name):
asc = True
if report_name == "autoreleased_from_quarantine_components":
sort_by = "releaseQuarantineTime"
else:
sort_by = "quarantineTime"
query = "{}?page={}&pageSize={}&sortBy={}&asc={}".format(end_point, page, page_size, sort_by, asc)
data = get_nexusiq_data(query)
page_count = data["pageCount"]
results = data["results"]
if len(results) > 0:
print_list_report(data["results"], report_name, page)
return page_count
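# Example of the paginated endpoint string built above, using the defaults from list_report
# (page 1, pageSize 250) for the quarantined-components report:
#   components/quarantined?page=1&pageSize=250&sortBy=quarantineTime&asc=True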
def print_list_report(results, report_name, page):
if debug:
print_json(results, report_name + "_" + str(page))
csv_file = "{}/{}{}".format(quarantine_datadir, report_name, ".csv")
with open(csv_file, 'a') as fd:
writer = csv.writer(fd, delimiter=",")
line = []
line.append("repository")
line.append("quarantine_date")
line.append("date_cleared")
line.append("path_name")
line.append("format")
line.append("quarantined")
line.append("policy_name")
line.append("threat_level")
line.append("cve")
writer.writerow(line)
for result in results:
repository = result["repository"]
quarantine_date = result["quarantineDate"]
date_cleared = result["dateCleared"]
path_name = result["pathname"]
quarantined = result["quarantined"]
format = result["componentIdentifier"]["format"]
if result["quarantinePolicyViolations"]:
for quarantinePolicyViolation in result["quarantinePolicyViolations"]:
policy_name = quarantinePolicyViolation["policyName"]
threat_level = quarantinePolicyViolation["threatLevel"]
for constraint in quarantinePolicyViolation["constraintViolations"]:
cve = getCVE(constraint["reasons"])
line = []
line.append(repository)
line.append(quarantine_date)
line.append(date_cleared)
line.append(path_name)
line.append(format)
line.append(quarantined)
line.append(policy_name)
line.append(threat_level)
line.append(cve)
writer.writerow(line)
else:
line = []
line.append(repository)
line.append(quarantine_date)
line.append(date_cleared)
line.append(path_name)
line.append(format)
line.append(quarantined)
                # pad the policy_name, threat_level and cve columns so the row matches the header
                line.append("")
                line.append("")
                line.append("")
writer.writerow(line)
return
def itemExists(item,items):
exists = False
for i in items:
if i == item:
exists = True
break
return exists
def getCVE(reasons):
    # Collect the unique reference values (e.g. CVE identifiers) and join them with ":"
    values = []
    for reason in reasons:
        reference = reason["reference"]
        if reference is not None:
            newValue = reference["value"]
            if not itemExists(newValue, values):
                values.append(newValue)
    return ":".join(values)
def autoreleased_from_quarantine_config():
end_point = "releaseQuarantine/configuration"
data = get_nexusiq_data(end_point)
print_json(data, "autoreleased_from_quarantine_config")
csv_file = "{}/{}{}".format(quarantine_datadir, "autoreleased_from_quarantine_config", ".csv")
with open(csv_file, 'w') as fd:
writer = csv.writer(fd, delimiter=",")
# print header
line = []
line.append("id")
line.append("name")
line.append("autoReleaseQuarantineEnabled")
writer.writerow(line)
# print data
for d in data:
line = []
line.append(d["id"])
line.append(d["name"])
line.append(d["autoReleaseQuarantineEnabled"])
writer.writerow(line)
return
def main():
summary_report("autoreleased_from_quarantine_summary", "releaseQuarantine/summary")
summary_report("quarantined_components_summary", "quarantine/summary")
autoreleased_from_quarantine_config()
list_report("autoreleased_from_quarantine_components", "components/autoReleasedFromQuarantine")
list_report("quarantined_components", "components/quarantined")
if __name__ == '__main__':
main()
| 27.28629
| 102
| 0.578247
|
305aa4a0018ab37d9f318fd1ec495fa907c2f1e7
| 491
|
py
|
Python
|
Python/io_image.py
|
abondar24/OpenCVBase
|
9b23e3b31304e77ad1135d90efb41e3dc069194a
|
[
"Apache-2.0"
] | null | null | null |
Python/io_image.py
|
abondar24/OpenCVBase
|
9b23e3b31304e77ad1135d90efb41e3dc069194a
|
[
"Apache-2.0"
] | null | null | null |
Python/io_image.py
|
abondar24/OpenCVBase
|
9b23e3b31304e77ad1135d90efb41e3dc069194a
|
[
"Apache-2.0"
] | null | null | null |
import cv2
import numpy as np
import os
image = cv2.imread('hard.jpg')
cv2.imwrite('president.jpg', image)
gray_image = cv2.imread('hard.jpg', cv2.IMREAD_GRAYSCALE)  # cv2.CV_LOAD_IMAGE_GRAYSCALE was removed in OpenCV 3+
cv2.imwrite('pr1.png', gray_image)
random_byte_array = bytearray(os.urandom(120000))
flat_numpy_array = np.array(random_byte_array)
gray_image = flat_numpy_array.reshape(300, 400)
cv2.imwrite('random_gray.png', gray_image)
bgr_image = flat_numpy_array.reshape(100, 400, 3)
cv2.imwrite('RandomColor.png', bgr_image)
| 27.277778
| 64
| 0.782077
|
a28dca29df01ff9c168c9a7b0863460aad30ea57
| 425
|
py
|
Python
|
utils/math_utils.py
|
gbanegas/KissECC
|
2630998178955a6e68adec34e30802447057c58f
|
[
"Apache-2.0"
] | 1
|
2017-02-08T16:01:48.000Z
|
2017-02-08T16:01:48.000Z
|
utils/math_utils.py
|
gbanegas/ecc
|
2630998178955a6e68adec34e30802447057c58f
|
[
"Apache-2.0"
] | null | null | null |
utils/math_utils.py
|
gbanegas/ecc
|
2630998178955a6e68adec34e30802447057c58f
|
[
"Apache-2.0"
] | null | null | null |
class MathUtil(object):
@staticmethod
def sqrt(n, q):
"""sqrt on PN modulo: returns two numbers or exception if not exist
>>> assert (sqrt(n, q)[0] ** 2) % q == n
>>> assert (sqrt(n, q)[1] ** 2) % q == n
"""
assert n < q
for i in range(1, q):
if i * i % q == n:
return (i, q - i)
pass
raise Exception("sqrt not found")
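    # Worked example (illustrative): MathUtil.sqrt(2, 7) returns (3, 4), since 3*3 % 7 == 2 and 4*4 % 7 == 2.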
| 26.5625
| 75
| 0.447059
|
366d153892c9f3f8f7898d390b54a8ac201beefa
| 2,176
|
py
|
Python
|
pendulum/server.py
|
DouglasOrr/Snippets
|
026e15a422b518ee7d9ce4849f971c4403ad9fe8
|
[
"MIT"
] | null | null | null |
pendulum/server.py
|
DouglasOrr/Snippets
|
026e15a422b518ee7d9ce4849f971c4403ad9fe8
|
[
"MIT"
] | 1
|
2020-04-11T18:07:19.000Z
|
2020-04-11T18:07:19.000Z
|
pendulum/server.py
|
DouglasOrr/Snippets
|
026e15a422b518ee7d9ce4849f971c4403ad9fe8
|
[
"MIT"
] | null | null | null |
import math, controllers
from flask import Flask, jsonify, request, redirect, url_for
from pendulum import *
class Sim:
THRESHOLD = math.pi / 3
def __init__(self):
self.set_controller('none')
self.restart()
def set_controller(self, name):
self.controller_name = name
self.controller = ManualControl(10, controllers.ALL[name])
def restart(self):
self.simulation = Simulation(SineNoise(
lambda t: 0.5 + t / 20,
[(0.73, 1), (2, 1), (2.9, 0.5), (13, 0.5)])
)
self.stopped = False
self.manual = False
self.last_controller_name = 'none'
def step(self, dt, manual_control):
if not self.stopped:
self.last_controller_name = self.controller_name
self.controller.control = manual_control
self.manual = self.manual or manual_control
n = int(math.ceil(dt / self.simulation.dt))
for i in range(0, n):
self.simulation.step(self.controller)
if Sim.THRESHOLD < abs(self.simulation.theta):
self.stopped = True
break
def state(self):
return {'theta': self.simulation.theta,
't': self.simulation.t,
'stopped': self.stopped,
'threshold': Sim.THRESHOLD,
'manual': self.manual,
'auto': self.last_controller_name}
app = Flask(__name__)
simulation = Sim()
@app.route('/')
def index():
return redirect(url_for('static', filename='pendulum.html'))
@app.route("/step", methods=['POST'])
def step():
simulation.step(float(request.form['dt']), int(request.form.get('control', '0')))
return jsonify(simulation.state())
@app.route("/restart", methods=['POST'])
def restart():
simulation.restart()
return ""
@app.route("/controllers", methods=['GET'])
def list_controllers():
return jsonify({'controllers': sorted(controllers.ALL.keys())})
@app.route("/controller", methods=['POST'])
def set_controller():
simulation.set_controller(request.form['name'])
return ""
if __name__ == "__main__":
app.run(debug=True)
| 29.808219
| 85
| 0.598346
|
eee52cfba7608d2264c3466e34b465201e86072d
| 15,970
|
py
|
Python
|
napari/_qt/qt_viewer.py
|
PBLab/napari
|
a10a88b2115160211863b47ee7d3012fc8c7beff
|
[
"BSD-3-Clause"
] | null | null | null |
napari/_qt/qt_viewer.py
|
PBLab/napari
|
a10a88b2115160211863b47ee7d3012fc8c7beff
|
[
"BSD-3-Clause"
] | 4
|
2019-12-08T20:20:44.000Z
|
2020-01-16T21:57:33.000Z
|
napari/_qt/qt_viewer.py
|
PBLab/napari
|
a10a88b2115160211863b47ee7d3012fc8c7beff
|
[
"BSD-3-Clause"
] | null | null | null |
import os.path
import inspect
from pathlib import Path
from qtpy import QtGui
from qtpy.QtCore import QCoreApplication, Qt, QSize
from qtpy.QtWidgets import QWidget, QVBoxLayout, QFileDialog, QSplitter
from qtpy.QtGui import QCursor, QPixmap
from qtpy.QtCore import QThreadPool
from vispy.scene import SceneCanvas, PanZoomCamera, ArcballCamera
from vispy.visuals.transforms import ChainTransform
from .qt_dims import QtDims
from .qt_layerlist import QtLayerList
from ..resources import resources_dir
from ..utils.theme import template
from ..utils.misc import str_to_rgb
from ..utils.interactions import (
ReadOnlyWrapper,
mouse_press_callbacks,
mouse_move_callbacks,
mouse_release_callbacks,
)
from ..utils.keybindings import components_to_key_combo
from .utils import QImg2array
from .qt_controls import QtControls
from .qt_viewer_buttons import QtLayerButtons, QtViewerButtons
from .qt_console import QtConsole
from .qt_viewer_dock_widget import QtViewerDockWidget
from .qt_about_keybindings import QtAboutKeybindings
from .._vispy import create_vispy_visual
class QtViewer(QSplitter):
with open(os.path.join(resources_dir, 'stylesheet.qss'), 'r') as f:
raw_stylesheet = f.read()
def __init__(self, viewer):
super().__init__()
self.pool = QThreadPool()
QCoreApplication.setAttribute(
Qt.AA_UseStyleSheetPropagationInWidgetStyles, True
)
self.viewer = viewer
self.dims = QtDims(self.viewer.dims)
self.controls = QtControls(self.viewer)
self.layers = QtLayerList(self.viewer.layers)
self.layerButtons = QtLayerButtons(self.viewer)
self.viewerButtons = QtViewerButtons(self.viewer)
self.console = QtConsole({'viewer': self.viewer})
layerList = QWidget()
layerList.setObjectName('layerList')
layerListLayout = QVBoxLayout()
layerListLayout.addWidget(self.layerButtons)
layerListLayout.addWidget(self.layers)
layerListLayout.addWidget(self.viewerButtons)
layerListLayout.setContentsMargins(8, 4, 8, 6)
layerList.setLayout(layerListLayout)
self.dockLayerList = QtViewerDockWidget(
self,
layerList,
name='layer list',
area='left',
allowed_areas=['left', 'right'],
)
self.dockLayerControls = QtViewerDockWidget(
self,
self.controls,
name='layer controls',
area='left',
allowed_areas=['left', 'right'],
)
self.dockConsole = QtViewerDockWidget(
self,
self.console,
name='console',
area='bottom',
allowed_areas=['top', 'bottom'],
shortcut='Ctrl+Shift+C',
)
self.dockConsole.setVisible(False)
self.dockLayerControls.visibilityChanged.connect(self._constrain_width)
self.dockLayerList.setMaximumWidth(258)
self.dockLayerList.setMinimumWidth(258)
self.aboutKeybindings = QtAboutKeybindings(self.viewer)
self.aboutKeybindings.hide()
# This dictionary holds the corresponding vispy visual for each layer
self.layer_to_visual = {}
if self.console.shell is not None:
self.viewerButtons.consoleButton.clicked.connect(
lambda: self.toggle_console()
)
else:
self.viewerButtons.consoleButton.setEnabled(False)
self.canvas = SceneCanvas(keys=None, vsync=True)
self.canvas.events.ignore_callback_errors = False
self.canvas.events.draw.connect(self.dims.enable_play)
self.canvas.native.setMinimumSize(QSize(200, 200))
self.canvas.context.set_depth_func('lequal')
self.canvas.connect(self.on_mouse_move)
self.canvas.connect(self.on_mouse_press)
self.canvas.connect(self.on_mouse_release)
self.canvas.connect(self.on_key_press)
self.canvas.connect(self.on_key_release)
self.canvas.connect(self.on_draw)
self.view = self.canvas.central_widget.add_view()
self._update_camera()
main_widget = QWidget()
main_layout = QVBoxLayout()
main_layout.setContentsMargins(10, 22, 10, 2)
main_layout.addWidget(self.canvas.native)
main_layout.addWidget(self.dims)
main_layout.setSpacing(10)
main_widget.setLayout(main_layout)
self.setOrientation(Qt.Vertical)
self.addWidget(main_widget)
self._last_visited_dir = str(Path.home())
self._cursors = {
'disabled': QCursor(
QPixmap(':/icons/cursor/cursor_disabled.png').scaled(20, 20)
),
'cross': Qt.CrossCursor,
'forbidden': Qt.ForbiddenCursor,
'pointing': Qt.PointingHandCursor,
'standard': QCursor(),
}
self._update_palette(viewer.palette)
self._key_release_generators = {}
self.viewer.events.interactive.connect(self._on_interactive)
self.viewer.events.cursor.connect(self._on_cursor)
self.viewer.events.reset_view.connect(self._on_reset_view)
self.viewer.events.palette.connect(
lambda event: self._update_palette(event.palette)
)
self.viewer.layers.events.reordered.connect(self._reorder_layers)
self.viewer.layers.events.added.connect(self._add_layer)
self.viewer.layers.events.removed.connect(self._remove_layer)
self.viewer.dims.events.camera.connect(
lambda event: self._update_camera()
)
# stop any animations whenever the layers change
self.viewer.events.layers_change.connect(lambda x: self.dims.stop())
self.setAcceptDrops(True)
def _constrain_width(self, event):
# allow the layer controls to be wider, only if floated
if self.dockLayerControls.isFloating():
self.controls.setMaximumWidth(700)
else:
self.controls.setMaximumWidth(220)
def _add_layer(self, event):
"""When a layer is added, set its parent and order."""
layers = event.source
layer = event.item
vispy_layer = create_vispy_visual(layer)
vispy_layer.camera = self.view.camera
vispy_layer.node.parent = self.view.scene
vispy_layer.order = len(layers)
self.layer_to_visual[layer] = vispy_layer
def _remove_layer(self, event):
"""When a layer is removed, remove its parent."""
layer = event.item
vispy_layer = self.layer_to_visual[layer]
vispy_layer.node.transforms = ChainTransform()
vispy_layer.node.parent = None
del self.layer_to_visual[layer]
def _reorder_layers(self, event):
"""When the list is reordered, propagate changes to draw order."""
for i, layer in enumerate(self.viewer.layers):
vispy_layer = self.layer_to_visual[layer]
vispy_layer.order = i
self.canvas._draw_order.clear()
self.canvas.update()
def _update_camera(self):
if self.viewer.dims.ndisplay == 3:
# Set a 3D camera
if not isinstance(self.view.camera, ArcballCamera):
self.view.camera = ArcballCamera(name="ArcballCamera", fov=0)
# flip y-axis to have correct alignment
# self.view.camera.flip = (0, 1, 0)
self.view.camera.viewbox_key_event = viewbox_key_event
self.viewer.reset_view()
else:
# Set 2D camera
if not isinstance(self.view.camera, PanZoomCamera):
self.view.camera = PanZoomCamera(
aspect=1, name="PanZoomCamera"
)
# flip y-axis to have correct alignment
self.view.camera.flip = (0, 1, 0)
self.view.camera.viewbox_key_event = viewbox_key_event
self.viewer.reset_view()
def screenshot(self):
"""Take currently displayed screen and convert to an image array.
Returns
-------
image : array
Numpy array of type ubyte and shape (h, w, 4). Index [0, 0] is the
upper-left corner of the rendered region.
"""
img = self.canvas.native.grabFramebuffer()
return QImg2array(img)
def _open_images(self):
"""Add image files from the menubar."""
filenames, _ = QFileDialog.getOpenFileNames(
parent=self,
caption='Select image(s)...',
directory=self._last_visited_dir, # home dir by default
)
if (filenames != []) and (filenames is not None):
self._add_files(filenames)
def _open_folder(self):
"""Add a folder of files from the menubar."""
folder = QFileDialog.getExistingDirectory(
parent=self,
caption='Select folder...',
directory=self._last_visited_dir, # home dir by default
)
if folder not in {'', None}:
self._add_files([folder])
def _add_files(self, filenames):
"""Add an image layer to the viewer.
If multiple images are selected, they are stacked along the 0th
axis.
Parameters
-------
filenames : list
List of filenames to be opened
"""
if len(filenames) > 0:
self.viewer.add_image(path=filenames)
self._last_visited_dir = os.path.dirname(filenames[0])
def _on_interactive(self, event):
self.view.interactive = self.viewer.interactive
def _on_cursor(self, event):
cursor = self.viewer.cursor
size = self.viewer.cursor_size
if cursor == 'square':
if size < 10 or size > 300:
q_cursor = self._cursors['cross']
else:
q_cursor = QCursor(
QPixmap(':/icons/cursor/cursor_square.png').scaledToHeight(
size
)
)
else:
q_cursor = self._cursors[cursor]
self.canvas.native.setCursor(q_cursor)
def _on_reset_view(self, event):
if isinstance(self.view.camera, ArcballCamera):
quat = self.view.camera._quaternion.create_from_axis_angle(
*event.quaternion
)
self.view.camera._quaternion = quat
self.view.camera.center = event.center
self.view.camera.scale_factor = event.scale_factor
else:
# Assumes default camera has the same properties as PanZoomCamera
self.view.camera.rect = event.rect
def _update_palette(self, palette):
# template and apply the primary stylesheet
themed_stylesheet = template(self.raw_stylesheet, **palette)
self.console.style_sheet = themed_stylesheet
self.console.syntax_style = palette['syntax_style']
bracket_color = QtGui.QColor(*str_to_rgb(palette['highlight']))
self.console._bracket_matcher.format.setBackground(bracket_color)
self.setStyleSheet(themed_stylesheet)
self.aboutKeybindings.setStyleSheet(themed_stylesheet)
self.canvas.bgcolor = palette['canvas']
def toggle_console(self):
"""Toggle console visible and not visible."""
viz = not self.dockConsole.isVisible()
        # modulate visibility at the dock-widget level, as the console is dockable
self.dockConsole.setVisible(viz)
if self.dockConsole.isFloating():
self.dockConsole.setFloating(True)
self.viewerButtons.consoleButton.setProperty(
'expanded', self.dockConsole.isVisible()
)
self.viewerButtons.consoleButton.style().unpolish(
self.viewerButtons.consoleButton
)
self.viewerButtons.consoleButton.style().polish(
self.viewerButtons.consoleButton
)
def on_mouse_press(self, event):
"""Called whenever mouse pressed in canvas.
"""
if event.pos is None:
return
event = ReadOnlyWrapper(event)
mouse_press_callbacks(self.viewer, event)
layer = self.viewer.active_layer
if layer is not None:
            # Line below needed until layer mouse callbacks are refactored
self.layer_to_visual[layer].on_mouse_press(event)
mouse_press_callbacks(layer, event)
def on_mouse_move(self, event):
"""Called whenever mouse moves over canvas.
"""
if event.pos is None:
return
mouse_move_callbacks(self.viewer, event)
layer = self.viewer.active_layer
if layer is not None:
            # Line below needed until layer mouse callbacks are refactored
self.layer_to_visual[layer].on_mouse_move(event)
mouse_move_callbacks(layer, event)
def on_mouse_release(self, event):
"""Called whenever mouse released in canvas.
"""
mouse_release_callbacks(self.viewer, event)
layer = self.viewer.active_layer
if layer is not None:
            # Line below needed until layer mouse callbacks are refactored
self.layer_to_visual[layer].on_mouse_release(event)
mouse_release_callbacks(layer, event)
def on_key_press(self, event):
"""Called whenever key pressed in canvas.
"""
if (
event.native is not None
and event.native.isAutoRepeat()
and event.key.name not in ['Up', 'Down', 'Left', 'Right']
) or event.key is None:
            # pass if no key is present or if the key is held down, unless the
            # key being held down is one of the navigation keys
return
comb = components_to_key_combo(event.key.name, event.modifiers)
layer = self.viewer.active_layer
if layer is not None and comb in layer.keymap:
parent = layer
elif comb in self.viewer.keymap:
parent = self.viewer
else:
return
func = parent.keymap[comb]
gen = func(parent)
if inspect.isgenerator(gen):
try:
next(gen)
except StopIteration: # only one statement
pass
else:
self._key_release_generators[event.key] = gen
def on_key_release(self, event):
"""Called whenever key released in canvas.
"""
try:
next(self._key_release_generators[event.key])
except (KeyError, StopIteration):
pass
def on_draw(self, event):
"""Called whenever drawn in canvas. Called for all layers, not just top
"""
for visual in self.layer_to_visual.values():
visual.on_draw(event)
def keyPressEvent(self, event):
self.canvas._backend._keyEvent(self.canvas.events.key_press, event)
event.accept()
def keyReleaseEvent(self, event):
self.canvas._backend._keyEvent(self.canvas.events.key_release, event)
event.accept()
def dragEnterEvent(self, event):
if event.mimeData().hasUrls():
event.accept()
else:
event.ignore()
def dropEvent(self, event):
"""Add local files and web URLS with drag and drop."""
filenames = []
for url in event.mimeData().urls():
if url.isLocalFile():
filenames.append(url.toLocalFile())
else:
filenames.append(url.toString())
self._add_files(filenames)
def closeEvent(self, event):
if self.pool.activeThreadCount() > 0:
self.pool.clear()
event.accept()
def shutdown(self):
self.pool.clear()
self.canvas.close()
self.console.shutdown()
def viewbox_key_event(event):
"""ViewBox key event handler
Parameters
----------
event : instance of Event
The event.
"""
return
| 34.945295 | 79 | 0.625172 |
1b32a3b84b78c2b4a0fb39c334952a8bb248ad95 | 37,472 | py | Python |
tensorflow_serving/model_servers/tensorflow_model_server_test.py | ccl0326/serving | a9780780ab00ee3903576d6f062a2908f0ad192a | ["Apache-2.0"] | 1 | 2020-05-24T02:50:50.000Z | 2020-05-24T02:50:50.000Z |
tensorflow_serving/model_servers/tensorflow_model_server_test.py | ccl0326/serving | a9780780ab00ee3903576d6f062a2908f0ad192a | ["Apache-2.0"] | null | null | null |
tensorflow_serving/model_servers/tensorflow_model_server_test.py | ccl0326/serving | a9780780ab00ee3903576d6f062a2908f0ad192a | ["Apache-2.0"] | 1 | 2020-12-04T14:52:46.000Z | 2020-12-04T14:52:46.000Z |
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow_model_server."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import atexit
import json
import os
import shlex
import socket
import subprocess
import sys
import time
# This is a placeholder for a Google-internal import.
import grpc
from six.moves import range
from six.moves import urllib
import tensorflow as tf
from tensorflow.core.framework import types_pb2
from tensorflow.python.eager import context
from tensorflow.python.framework import config as device_config
from tensorflow.python.platform import flags
from tensorflow.python.saved_model import signature_constants
from tensorflow_serving.apis import classification_pb2
from tensorflow_serving.apis import get_model_status_pb2
from tensorflow_serving.apis import inference_pb2
from tensorflow_serving.apis import model_service_pb2_grpc
from tensorflow_serving.apis import predict_pb2
from tensorflow_serving.apis import prediction_service_pb2_grpc
from tensorflow_serving.apis import regression_pb2
FLAGS = flags.FLAGS
RPC_TIMEOUT = 5.0
HTTP_REST_TIMEOUT_MS = 5000
CHANNEL_WAIT_TIMEOUT = 5.0
WAIT_FOR_SERVER_READY_INT_SECS = 60
GRPC_SOCKET_PATH = '/tmp/tf-serving.sock'
def SetVirtualCpus(num_virtual_cpus):
"""Create virtual CPU devices if they haven't yet been created."""
if num_virtual_cpus < 1:
raise ValueError('`num_virtual_cpus` must be at least 1, not %r' %
(num_virtual_cpus,))
physical_devices = device_config.list_physical_devices('CPU')
if not physical_devices:
raise RuntimeError('No CPUs found')
configs = device_config.get_virtual_device_configuration(physical_devices[0])
if configs is None:
virtual_devices = [context.VirtualDeviceConfiguration()
for _ in range(num_virtual_cpus)]
device_config.set_virtual_device_configuration(
physical_devices[0], virtual_devices)
else:
if len(configs) < num_virtual_cpus:
raise RuntimeError('Already configured with %d < %d virtual CPUs' %
(len(configs), num_virtual_cpus))
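# Editor's note (hedged usage sketch): per the checks above, SetVirtualCpus
# can be called repeatedly as long as later calls do not request more virtual
# CPUs than were configured the first time, and it should run before other
# code initializes the CPU devices.
def _example_set_virtual_cpus_usage():
    """Illustrative only; mirrors the note in the tf.distribute test below."""
    SetVirtualCpus(2)  # first call configures two virtual CPU devices
    SetVirtualCpus(2)  # no-op: the requested count is already satisfied
    # SetVirtualCpus(3) would raise
    # RuntimeError('Already configured with 2 < 3 virtual CPUs')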
def PickUnusedPort():
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
s.bind(('', 0))
port = s.getsockname()[1]
s.close()
return port
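# Editor's note (hedged): PickUnusedPort uses the bind-to-port-0 trick -- the
# OS assigns a free ephemeral port, which is read back before the socket is
# closed. There is a small race window before the server re-binds the port,
# which is generally acceptable in tests. Minimal usage sketch:
def _example_pick_ports():
    grpc_port = PickUnusedPort()
    rest_port = PickUnusedPort()
    return grpc_port, rest_port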
def WaitForServerReady(port):
"""Waits for a server on the localhost to become ready."""
for _ in range(0, WAIT_FOR_SERVER_READY_INT_SECS):
time.sleep(1)
request = predict_pb2.PredictRequest()
request.model_spec.name = 'intentionally_missing_model'
try:
# Send empty request to missing model
channel = grpc.insecure_channel('localhost:{}'.format(port))
stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
stub.Predict(request, RPC_TIMEOUT)
except grpc.RpcError as error:
# Missing model error will have details containing 'Servable'
if 'Servable' in error.details():
print('Server is ready')
break
def CallREST(url, req, max_attempts=60):
"""Returns HTTP response body from a REST API call."""
for attempt in range(max_attempts):
try:
print('Attempt {}: Sending request to {} with data:\n{}'.format(
attempt, url, req))
json_data = json.dumps(req).encode('utf-8') if req is not None else None
resp = urllib.request.urlopen(urllib.request.Request(url, data=json_data))
resp_data = resp.read()
print('Received response:\n{}'.format(resp_data))
resp.close()
return resp_data
except Exception as e: # pylint: disable=broad-except
print('Failed attempt {}. Error: {}'.format(attempt, e))
if attempt == max_attempts - 1:
raise
print('Retrying...')
time.sleep(1)
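# Editor's note (hedged usage sketch): CallREST retries once per second up to
# max_attempts; req=None issues a GET, while a JSON-serializable dict is sent
# as a POST body. The port below is a placeholder obtained elsewhere.
def _example_call_rest(rest_port):
    status = json.loads(CallREST(
        'http://localhost:{}/v1/models/default'.format(rest_port), None))
    predictions = json.loads(CallREST(
        'http://localhost:{}/v1/models/default:predict'.format(rest_port),
        {'instances': [2.0]}))
    return status, predictions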
class TensorflowModelServerTest(tf.test.TestCase):
"""This class defines integration test cases for tensorflow_model_server."""
@staticmethod
def __TestSrcDirPath(relative_path=''):
return os.path.join(os.environ['TEST_SRCDIR'],
'tf_serving/tensorflow_serving', relative_path)
@staticmethod
def GetArgsKey(*args, **kwargs):
return args + tuple(sorted(kwargs.items()))
# Maps string key -> 2-tuple of 'host:port' string.
model_servers_dict = {}
@staticmethod
def RunServer(model_name,
model_path,
model_config_file=None,
monitoring_config_file=None,
batching_parameters_file=None,
grpc_channel_arguments='',
wait_for_server_ready=True,
pipe=None,
model_config_file_poll_period=None):
"""Run tensorflow_model_server using test config.
A unique server instance is started for each distinct set of arguments.
If called again with the same arguments, a handle to the existing server
is returned.
Args:
model_name: Name of model.
model_path: Path to model.
model_config_file: Path to model config file.
monitoring_config_file: Path to the monitoring config file.
batching_parameters_file: Path to batching parameters.
grpc_channel_arguments: Custom gRPC args for server.
wait_for_server_ready: Wait for gRPC port to be ready.
pipe: subpipe.PIPE object to read stderr from server.
model_config_file_poll_period: Period for polling the
filesystem to discover new model configs.
Returns:
3-tuple (<Popen object>, <grpc host:port>, <rest host:port>).
Raises:
ValueError: when both model_path and model_config_file are empty.
"""
args_key = TensorflowModelServerTest.GetArgsKey(**locals())
if args_key in TensorflowModelServerTest.model_servers_dict:
return TensorflowModelServerTest.model_servers_dict[args_key]
port = PickUnusedPort()
rest_api_port = PickUnusedPort()
print(('Starting test server on port: {} for model_name: '
'{}/model_config_file: {}'.format(port, model_name,
model_config_file)))
command = os.path.join(
TensorflowModelServerTest.__TestSrcDirPath('model_servers'),
'tensorflow_model_server')
command += ' --port=' + str(port)
command += ' --rest_api_port=' + str(rest_api_port)
command += ' --rest_api_timeout_in_ms=' + str(HTTP_REST_TIMEOUT_MS)
command += ' --grpc_socket_path=' + GRPC_SOCKET_PATH
if model_config_file:
command += ' --model_config_file=' + model_config_file
elif model_path:
command += ' --model_name=' + model_name
command += ' --model_base_path=' + model_path
else:
raise ValueError('Both model_config_file and model_path cannot be empty!')
if monitoring_config_file:
command += ' --monitoring_config_file=' + monitoring_config_file
if model_config_file_poll_period is not None:
command += ' --model_config_file_poll_wait_seconds=' + str(
model_config_file_poll_period)
if batching_parameters_file:
command += ' --enable_batching'
command += ' --batching_parameters_file=' + batching_parameters_file
if grpc_channel_arguments:
command += ' --grpc_channel_arguments=' + grpc_channel_arguments
print(command)
proc = subprocess.Popen(shlex.split(command), stderr=pipe)
atexit.register(proc.kill)
print('Server started')
if wait_for_server_ready:
WaitForServerReady(port)
hostports = (
proc,
'localhost:' + str(port),
'localhost:' + str(rest_api_port),
)
TensorflowModelServerTest.model_servers_dict[args_key] = hostports
return hostports
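# Editor's note (hedged usage sketch): RunServer memoizes on its argument
# tuple, so repeated calls with identical arguments reuse the same server
# process. The returned 3-tuple is typically unpacked like this:
def _example_run_server_usage(model_path):
    proc, grpc_addr, rest_addr = TensorflowModelServerTest.RunServer(
        'default', model_path)
    # grpc_addr is 'localhost:<grpc port>', rest_addr is 'localhost:<rest port>'
    return proc, grpc_addr, rest_addr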
def __BuildModelConfigFile(self):
"""Write a config file to disk for use in tests.
Substitutes placeholder for test directory with test directory path
in the configuration template file and writes it out to another file
used by the test.
"""
with open(self._GetGoodModelConfigTemplate(), 'r') as template_file:
config = template_file.read().replace('${TEST_HALF_PLUS_TWO_DIR}',
self._GetSavedModelBundlePath())
config = config.replace('${TEST_HALF_PLUS_THREE_DIR}',
self._GetSavedModelHalfPlusThreePath())
with open(self._GetGoodModelConfigFile(), 'w') as config_file:
config_file.write(config)
def setUp(self):
"""Sets up integration test parameters."""
self.testdata_dir = TensorflowModelServerTest.__TestSrcDirPath(
'servables/tensorflow/testdata')
self.temp_dir = tf.test.get_temp_dir()
self.server_proc = None
self.__BuildModelConfigFile()
def tearDown(self):
"""Deletes created configuration file."""
os.remove(self._GetGoodModelConfigFile())
def VerifyPredictRequest(
self,
model_server_address,
expected_output,
expected_version,
model_name='default',
specify_output=True,
batch_input=False,
signature_name=signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY):
"""Send PredictionService.Predict request and verify output."""
print('Sending Predict request...')
# Prepare request
request = predict_pb2.PredictRequest()
request.model_spec.name = model_name
request.model_spec.signature_name = signature_name
request.inputs['x'].dtype = types_pb2.DT_FLOAT
request.inputs['x'].float_val.append(2.0)
dim = request.inputs['x'].tensor_shape.dim.add()
dim.size = 1
if batch_input:
request.inputs['x'].tensor_shape.dim.add().size = 1
if specify_output:
request.output_filter.append('y')
# Send request
channel = grpc.insecure_channel(model_server_address)
stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
result = stub.Predict(request, RPC_TIMEOUT) # 5 secs timeout
# Verify response
self.assertTrue('y' in result.outputs)
self.assertIs(types_pb2.DT_FLOAT, result.outputs['y'].dtype)
self.assertEqual(1, len(result.outputs['y'].float_val))
self.assertEqual(expected_output, result.outputs['y'].float_val[0])
self._VerifyModelSpec(result.model_spec, request.model_spec.name,
signature_name, expected_version)
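# Editor's note (hedged): the request above fills the input TensorProto field
# by field; an alternative is tf.make_tensor_proto, which sets dtype, shape
# and values in one call. A minimal sketch (names are illustrative):
def _example_build_predict_request(model_name='default'):
    request = predict_pb2.PredictRequest()
    request.model_spec.name = model_name
    request.inputs['x'].CopyFrom(tf.make_tensor_proto([2.0], dtype=tf.float32))
    return request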
def _GetSavedModelBundlePath(self):
"""Returns a path to a model in SavedModel format."""
return os.path.join(os.environ['TEST_SRCDIR'], 'tf_serving/external/org_tensorflow/tensorflow/',
'cc/saved_model/testdata/half_plus_two')
def _GetModelVersion(self, model_path):
"""Returns version of SavedModel/SessionBundle in given path.
This method assumes there is exactly one directory with an 'int' valued
directory name under `model_path`.
Args:
model_path: A string representing path to the SavedModel/SessionBundle.
Returns:
version of SavedModel/SessionBundle in given path.
"""
return int(os.listdir(model_path)[0])
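# Editor's note (hedged illustration): _GetModelVersion expects exactly one
# integer-named version directory under the model base path, e.g.
#   <model_base_path>/00000123/saved_model.pb
# in which case it returns 123. A tiny sketch that builds such a layout:
def _example_single_version_layout(base_dir):
    os.makedirs(os.path.join(base_dir, '00000123'))
    # self._GetModelVersion(base_dir) would now return 123.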
def _GetSavedModelHalfPlusThreePath(self):
"""Returns a path to a half_plus_three model in SavedModel format."""
return os.path.join(self.testdata_dir, 'saved_model_half_plus_three')
def _GetSessionBundlePath(self):
"""Returns a path to a model in SessionBundle format."""
return os.path.join(self.testdata_dir, 'half_plus_two')
def _GetGoodModelConfigTemplate(self):
"""Returns a path to a working configuration file template."""
return os.path.join(self.testdata_dir, 'good_model_config.txt')
def _GetGoodModelConfigFile(self):
"""Returns a path to a working configuration file."""
return os.path.join(self.temp_dir, 'good_model_config.conf')
def _GetBadModelConfigFile(self):
"""Returns a path to a improperly formatted configuration file."""
return os.path.join(self.testdata_dir, 'bad_model_config.txt')
def _GetBatchingParametersFile(self):
"""Returns a path to a batching configuration file."""
return os.path.join(self.testdata_dir, 'batching_config.txt')
def _GetModelMetadataFile(self):
"""Returns a path to a sample model metadata file."""
return os.path.join(self.testdata_dir, 'half_plus_two_model_metadata.json')
def _GetMonitoringConfigFile(self):
"""Returns a path to a monitoring configuration file."""
return os.path.join(self.testdata_dir, 'monitoring_config.txt')
def _VerifyModelSpec(self,
actual_model_spec,
exp_model_name,
exp_signature_name,
exp_version):
"""Verifies model_spec matches expected model name, signature, version.
Args:
actual_model_spec: An instance of ModelSpec proto.
exp_model_name: A string that represents expected model name.
exp_signature_name: A string that represents expected signature.
exp_version: An integer that represents expected version.
Returns:
None.
"""
self.assertEqual(actual_model_spec.name, exp_model_name)
self.assertEqual(actual_model_spec.signature_name, exp_signature_name)
self.assertEqual(actual_model_spec.version.value, exp_version)
def testGetModelStatus(self):
"""Test ModelService.GetModelStatus implementation."""
model_path = self._GetSavedModelBundlePath()
model_server_address = TensorflowModelServerTest.RunServer(
'default', model_path)[1]
print('Sending GetModelStatus request...')
# Send request
request = get_model_status_pb2.GetModelStatusRequest()
request.model_spec.name = 'default'
channel = grpc.insecure_channel(model_server_address)
stub = model_service_pb2_grpc.ModelServiceStub(channel)
result = stub.GetModelStatus(request, RPC_TIMEOUT) # 5 secs timeout
# Verify response
self.assertEqual(1, len(result.model_version_status))
self.assertEqual(123, result.model_version_status[0].version)
# OK error code (0) indicates no error occurred
self.assertEqual(0, result.model_version_status[0].status.error_code)
def testClassify(self):
"""Test PredictionService.Classify implementation."""
model_path = self._GetSavedModelBundlePath()
model_server_address = TensorflowModelServerTest.RunServer(
'default', model_path)[1]
print('Sending Classify request...')
# Prepare request
request = classification_pb2.ClassificationRequest()
request.model_spec.name = 'default'
request.model_spec.signature_name = 'classify_x_to_y'
example = request.input.example_list.examples.add()
example.features.feature['x'].float_list.value.extend([2.0])
# Send request
channel = grpc.insecure_channel(model_server_address)
stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
result = stub.Classify(request, RPC_TIMEOUT) # 5 secs timeout
# Verify response
self.assertEqual(1, len(result.result.classifications))
self.assertEqual(1, len(result.result.classifications[0].classes))
expected_output = 3.0
self.assertEqual(expected_output,
result.result.classifications[0].classes[0].score)
self._VerifyModelSpec(result.model_spec, request.model_spec.name,
request.model_spec.signature_name,
self._GetModelVersion(model_path))
def testRegress(self):
"""Test PredictionService.Regress implementation."""
model_path = self._GetSavedModelBundlePath()
model_server_address = TensorflowModelServerTest.RunServer(
'default', model_path)[1]
print('Sending Regress request...')
# Prepare request
request = regression_pb2.RegressionRequest()
request.model_spec.name = 'default'
request.model_spec.signature_name = 'regress_x_to_y'
example = request.input.example_list.examples.add()
example.features.feature['x'].float_list.value.extend([2.0])
# Send request
channel = grpc.insecure_channel(model_server_address)
stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
result = stub.Regress(request, RPC_TIMEOUT) # 5 secs timeout
# Verify response
self.assertEqual(1, len(result.result.regressions))
expected_output = 3.0
self.assertEqual(expected_output, result.result.regressions[0].value)
self._VerifyModelSpec(result.model_spec, request.model_spec.name,
request.model_spec.signature_name,
self._GetModelVersion(model_path))
def testMultiInference(self):
"""Test PredictionService.MultiInference implementation."""
model_path = self._GetSavedModelBundlePath()
model_server_address = TensorflowModelServerTest.RunServer(
'default', model_path)[1]
print('Sending MultiInference request...')
# Prepare request
request = inference_pb2.MultiInferenceRequest()
request.tasks.add().model_spec.name = 'default'
request.tasks[0].model_spec.signature_name = 'regress_x_to_y'
request.tasks[0].method_name = 'tensorflow/serving/regress'
request.tasks.add().model_spec.name = 'default'
request.tasks[1].model_spec.signature_name = 'classify_x_to_y'
request.tasks[1].method_name = 'tensorflow/serving/classify'
example = request.input.example_list.examples.add()
example.features.feature['x'].float_list.value.extend([2.0])
# Send request
channel = grpc.insecure_channel(model_server_address)
stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
result = stub.MultiInference(request, RPC_TIMEOUT) # 5 secs timeout
# Verify response
self.assertEqual(2, len(result.results))
expected_output = 3.0
self.assertEqual(expected_output,
result.results[0].regression_result.regressions[0].value)
self.assertEqual(
expected_output, result.results[1].classification_result
.classifications[0].classes[0].score)
for i in range(2):
self._VerifyModelSpec(result.results[i].model_spec,
request.tasks[i].model_spec.name,
request.tasks[i].model_spec.signature_name,
self._GetModelVersion(model_path))
def _TestPredict(
self,
model_path,
batching_parameters_file=None,
signature_name=signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY):
"""Helper method to test prediction.
Args:
model_path: Path to the model on disk.
batching_parameters_file: Batching parameters file to use (if None
batching is not enabled).
signature_name: Signature name to expect in the PredictResponse.
"""
model_server_address = TensorflowModelServerTest.RunServer(
'default',
model_path,
batching_parameters_file=batching_parameters_file)[1]
expected_version = self._GetModelVersion(model_path)
self.VerifyPredictRequest(model_server_address, expected_output=3.0,
expected_version=expected_version,
signature_name=signature_name)
self.VerifyPredictRequest(
model_server_address, expected_output=3.0, specify_output=False,
expected_version=expected_version, signature_name=signature_name)
def testPredictBatching(self):
"""Test PredictionService.Predict implementation with SessionBundle."""
self._TestPredict(
self._GetSessionBundlePath(),
batching_parameters_file=self._GetBatchingParametersFile())
def testPredictSavedModel(self):
"""Test PredictionService.Predict implementation with SavedModel."""
self._TestPredict(self._GetSavedModelBundlePath())
def testPredictUpconvertedSavedModel(self):
"""Test PredictionService.Predict implementation.
Using a SessionBundle converted to a SavedModel.
"""
self._TestPredict(self._GetSessionBundlePath())
def _TestBadModel(self):
"""Helper method to test against a bad model export."""
# Both SessionBundle and SavedModel use the same bad model path, but in the
# case of SavedModel, the export will get up-converted to a SavedModel.
# As the bad model will prevent the server from becoming ready, we set the
# wait_for_server_ready param to False to avoid blocking/timing out.
model_path = os.path.join(self.testdata_dir, 'bad_half_plus_two')  # no trailing comma: model_path must be a str, not a tuple
model_server_address = TensorflowModelServerTest.RunServer(
'default', model_path, wait_for_server_ready=False)[1]
with self.assertRaises(grpc.RpcError) as ectxt:
self.VerifyPredictRequest(
model_server_address, expected_output=3.0,
expected_version=self._GetModelVersion(model_path),
signature_name='')
self.assertIs(grpc.StatusCode.FAILED_PRECONDITION,
ectxt.exception.code())
def _TestBadModelUpconvertedSavedModel(self):
"""Test Predict against a bad upconverted SavedModel model export."""
self._TestBadModel()
def testGoodModelConfig(self):
"""Test server configuration from file works with valid configuration."""
model_server_address = TensorflowModelServerTest.RunServer(
None, None, model_config_file=self._GetGoodModelConfigFile())[1]
self.VerifyPredictRequest(
model_server_address, model_name='half_plus_two', expected_output=3.0,
expected_version=self._GetModelVersion(self._GetSavedModelBundlePath()))
self.VerifyPredictRequest(
model_server_address, model_name='half_plus_two',
expected_output=3.0, specify_output=False,
expected_version=self._GetModelVersion(self._GetSavedModelBundlePath()))
self.VerifyPredictRequest(
model_server_address, model_name='half_plus_three', expected_output=4.0,
expected_version=self._GetModelVersion(
self._GetSavedModelHalfPlusThreePath()))
self.VerifyPredictRequest(
model_server_address, model_name='half_plus_three', expected_output=4.0,
specify_output=False,
expected_version=self._GetModelVersion(
self._GetSavedModelHalfPlusThreePath()))
def testBadModelConfig(self):
"""Test server model configuration from file fails for invalid file."""
proc = TensorflowModelServerTest.RunServer(
None,
None,
model_config_file=self._GetBadModelConfigFile(),
pipe=subprocess.PIPE,
wait_for_server_ready=False)[0]
error_message = ('Error parsing text-format '
'tensorflow.serving.ModelServerConfig')
error_message = error_message.encode('utf-8')
self.assertNotEqual(proc.stderr, None)
self.assertGreater(proc.stderr.read().find(error_message), -1)
def testModelConfigReload(self):
"""Test model server polls filesystem for model configuration."""
base_config_proto = """
model_config_list: {{
config: {{
name: "{name}",
base_path: "{model_path}",
model_platform: "tensorflow"
}}
}}
"""
config_path = os.path.join(FLAGS.test_tmpdir, 'model_config.txt')
# Write a config file serving half_plus_two model
with open(config_path, 'w') as f:
f.write(
base_config_proto.format(
name='half_plus_two', model_path=self._GetSavedModelBundlePath()))
poll_period = 1
model_server_address = TensorflowModelServerTest.RunServer(
None,
None,
model_config_file=config_path,
model_config_file_poll_period=poll_period)[1]
self.VerifyPredictRequest(
model_server_address,
model_name='half_plus_two',
expected_output=3.0,
specify_output=False,
expected_version=self._GetModelVersion(self._GetSavedModelBundlePath()))
# Rewrite the config file with half_plus_three model
with open(config_path, 'w') as f:
f.write(
base_config_proto.format(
name='half_plus_three',
model_path=self._GetSavedModelHalfPlusThreePath()))
# Give modelserver time to poll and load the new config
time.sleep(poll_period + 1)
# Verify new model config was realized in model server
self.VerifyPredictRequest(
model_server_address,
model_name='half_plus_three',
expected_output=4.0,
specify_output=False,
expected_version=self._GetModelVersion(
self._GetSavedModelHalfPlusThreePath()))
def testModelConfigReloadWithZeroPollPeriod(self):
"""Test model server does not poll filesystem for model config."""
base_config_proto = """
model_config_list: {{
config: {{
name: "{name}",
base_path: "{model_path}",
model_platform: "tensorflow"
}}
}}
"""
config_path = os.path.join(FLAGS.test_tmpdir, 'model_config.txt')
# Write a config file serving half_plus_two model
with open(config_path, 'w') as f:
f.write(
base_config_proto.format(
name='half_plus_two', model_path=self._GetSavedModelBundlePath()))
poll_period = 0
model_server_address = TensorflowModelServerTest.RunServer(
None,
None,
model_config_file=config_path,
model_config_file_poll_period=poll_period)[1]
self.VerifyPredictRequest(
model_server_address,
model_name='half_plus_two',
expected_output=3.0,
specify_output=False,
expected_version=self._GetModelVersion(self._GetSavedModelBundlePath()))
# Rewrite the config file with half_plus_three model
with open(config_path, 'w') as f:
f.write(
base_config_proto.format(
name='half_plus_three',
model_path=self._GetSavedModelHalfPlusThreePath()))
# Give modelserver enough time to poll and load the new config should it
# have such a desire
time.sleep(poll_period + 1)
# Verify model server is still serving the old model config
self.VerifyPredictRequest(
model_server_address,
model_name='half_plus_two',
expected_output=3.0,
specify_output=False,
expected_version=self._GetModelVersion(
self._GetSavedModelHalfPlusThreePath()))
def testGoodGrpcChannelArgs(self):
"""Test server starts with grpc_channel_arguments specified."""
model_server_address = TensorflowModelServerTest.RunServer(
'default',
self._GetSavedModelBundlePath(),
grpc_channel_arguments=
'grpc.max_connection_age_ms=2000,grpc.lb_policy_name=grpclb')[1]
self.VerifyPredictRequest(
model_server_address,
expected_output=3.0,
specify_output=False,
expected_version=self._GetModelVersion(
self._GetSavedModelHalfPlusThreePath()))
def testClassifyREST(self):
"""Test Classify implementation over REST API."""
model_path = self._GetSavedModelBundlePath()
host, port = TensorflowModelServerTest.RunServer('default',
model_path)[2].split(':')
# Prepare request
url = 'http://{}:{}/v1/models/default:classify'.format(host, port)
json_req = {'signature_name': 'classify_x_to_y', 'examples': [{'x': 2.0}]}
# Send request
resp_data = None
try:
resp_data = CallREST(url, json_req)
except Exception as e: # pylint: disable=broad-except
self.fail('Request failed with error: {}'.format(e))
# Verify response
self.assertEqual(json.loads(resp_data), {'results': [[['', 3.0]]]})
def testRegressREST(self):
"""Test Regress implementation over REST API."""
model_path = self._GetSavedModelBundlePath()
host, port = TensorflowModelServerTest.RunServer('default',
model_path)[2].split(':')
# Prepare request
url = 'http://{}:{}/v1/models/default:regress'.format(host, port)
json_req = {'signature_name': 'regress_x_to_y', 'examples': [{'x': 2.0}]}
# Send request
resp_data = None
try:
resp_data = CallREST(url, json_req)
except Exception as e: # pylint: disable=broad-except
self.fail('Request failed with error: {}'.format(e))
# Verify response
self.assertEqual(json.loads(resp_data), {'results': [3.0]})
def testPredictREST(self):
"""Test Predict implementation over REST API."""
model_path = self._GetSavedModelBundlePath()
host, port = TensorflowModelServerTest.RunServer('default',
model_path)[2].split(':')
# Prepare request
url = 'http://{}:{}/v1/models/default:predict'.format(host, port)
json_req = {'instances': [2.0, 3.0, 4.0]}
# Send request
resp_data = None
try:
resp_data = CallREST(url, json_req)
except Exception as e: # pylint: disable=broad-except
self.fail('Request failed with error: {}'.format(e))
# Verify response
self.assertEqual(json.loads(resp_data), {'predictions': [3.0, 3.5, 4.0]})
def testPredictColumnarREST(self):
"""Test Predict implementation over REST API with columnar inputs."""
model_path = self._GetSavedModelBundlePath()
host, port = TensorflowModelServerTest.RunServer('default',
model_path)[2].split(':')
# Prepare request
url = 'http://{}:{}/v1/models/default:predict'.format(host, port)
json_req = {'inputs': [2.0, 3.0, 4.0]}
# Send request
resp_data = None
try:
resp_data = CallREST(url, json_req)
except Exception as e: # pylint: disable=broad-except
self.fail('Request failed with error: {}'.format(e))
# Verify response
self.assertEqual(json.loads(resp_data), {'outputs': [3.0, 3.5, 4.0]})
def testGetStatusREST(self):
"""Test ModelStatus implementation over REST API with columnar inputs."""
model_path = self._GetSavedModelBundlePath()
host, port = TensorflowModelServerTest.RunServer('default',
model_path)[2].split(':')
# Prepare request
url = 'http://{}:{}/v1/models/default'.format(host, port)
# Send request
resp_data = None
try:
resp_data = CallREST(url, None)
except Exception as e: # pylint: disable=broad-except
self.fail('Request failed with error: {}'.format(e))
# Verify response
self.assertEqual(
json.loads(resp_data), {
'model_version_status': [{
'version': '123',
'state': 'AVAILABLE',
'status': {
'error_code': 'OK',
'error_message': ''
}
}]
})
def testGetModelMetadataREST(self):
"""Test ModelStatus implementation over REST API with columnar inputs."""
model_path = self._GetSavedModelBundlePath()
host, port = TensorflowModelServerTest.RunServer('default',
model_path)[2].split(':')
# Prepare request
url = 'http://{}:{}/v1/models/default/metadata'.format(host, port)
# Send request
resp_data = None
try:
resp_data = CallREST(url, None)
except Exception as e: # pylint: disable=broad-except
self.fail('Request failed with error: {}'.format(e))
try:
model_metadata_file = self._GetModelMetadataFile()
with open(model_metadata_file) as f:
expected_metadata = json.load(f)
# Verify response
self.assertEqual(json.loads(resp_data), expected_metadata)
except Exception as e: # pylint: disable=broad-except
self.fail('Request failed with error: {}'.format(e))
def testPrometheusEndpoint(self):
"""Test ModelStatus implementation over REST API with columnar inputs."""
model_path = self._GetSavedModelBundlePath()
host, port = TensorflowModelServerTest.RunServer(
'default',
model_path,
monitoring_config_file=self._GetMonitoringConfigFile())[2].split(':')
# Prepare request
url = 'http://{}:{}/monitoring/prometheus/metrics'.format(host, port)
# Send request
resp_data = None
try:
resp_data = CallREST(url, None)
except Exception as e: # pylint: disable=broad-except
self.fail('Request failed with error: {}'.format(e))
# Verify that there should be some metric type information.
self.assertIn('# TYPE',
resp_data.decode('utf-8') if resp_data is not None else None)
def testPredictUDS(self):
"""Test saved model prediction over a Unix domain socket."""
_ = TensorflowModelServerTest.RunServer('default',
self._GetSavedModelBundlePath())
model_server_address = 'unix:%s' % GRPC_SOCKET_PATH
self.VerifyPredictRequest(
model_server_address,
expected_output=3.0,
specify_output=False,
expected_version=self._GetModelVersion(
self._GetSavedModelHalfPlusThreePath()))
def test_tf_saved_model_save(self):
base_path = os.path.join(self.get_temp_dir(), 'tf_saved_model_save')
export_path = os.path.join(base_path, '00000123')
root = tf.train.Checkpoint()
root.v1 = tf.Variable(3.)
root.v2 = tf.Variable(2.)
root.f = tf.function(
lambda x: {'y': root.v1 * root.v2 * x})
to_save = root.f.get_concrete_function(tf.TensorSpec(None, tf.float32))
tf.saved_model.experimental.save(root, export_path, to_save)
_, model_server_address, _ = TensorflowModelServerTest.RunServer(
'default', base_path)
expected_version = self._GetModelVersion(base_path)
self.VerifyPredictRequest(
model_server_address,
expected_output=12.0,
specify_output=False,
expected_version=expected_version)
def test_tf_saved_model_save_multiple_signatures(self):
base_path = os.path.join(self.get_temp_dir(), 'tf_saved_model_save')
export_path = os.path.join(base_path, '00000123')
root = tf.train.Checkpoint()
root.f = tf.function(lambda x: {'y': 1.},
input_signature=[tf.TensorSpec(None, tf.float32)])
root.g = tf.function(lambda x: {'y': 2.},
input_signature=[tf.TensorSpec(None, tf.float32)])
tf.saved_model.experimental.save(
root, export_path,
signatures={
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: root.f,
'custom_signature_key': root.g})
_, model_server_address, _ = TensorflowModelServerTest.RunServer(
'default', base_path)
expected_version = self._GetModelVersion(base_path)
self.VerifyPredictRequest(
model_server_address,
expected_output=2.0,
expected_version=expected_version,
signature_name='custom_signature_key')
self.VerifyPredictRequest(
model_server_address,
expected_output=1.0,
expected_version=expected_version)
def test_sequential_keras_saved_model_save(self):
"""Test loading a simple SavedModel created with Keras Sequential API."""
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Input(dtype='float32', shape=(1,), name='x'))
model.add(tf.keras.layers.Lambda(lambda x: x, name='y'))
base_path = os.path.join(self.get_temp_dir(),
'keras_sequential_saved_model_save')
export_path = os.path.join(base_path, '00000123')
tf.saved_model.save(model, export_path)
_, model_server_address, _ = TensorflowModelServerTest.RunServer(
'default', base_path)
expected_version = self._GetModelVersion(base_path)
self.VerifyPredictRequest(
model_server_address,
batch_input=True,
specify_output=False,
expected_output=2.0,
expected_version=expected_version)
def test_distrat_sequential_keras_saved_model_save(self):
"""Test loading a Keras SavedModel with tf.distribute."""
# You need to call SetVirtualCpus in test setUp with the maximum value
# needed in any test if you use this in multiple tests. For now this is the
# only test using this functionality.
SetVirtualCpus(2)
strategy = tf.distribute.MirroredStrategy(devices=('/cpu:0', '/cpu:1'))
with strategy.scope():
model = tf.keras.models.Sequential()
model.add(tf.keras.layers.Input(dtype='float32', shape=(1,), name='x'))
model.add(tf.keras.layers.Dense(1, kernel_initializer='ones',
bias_initializer='zeros'))
model.add(tf.keras.layers.Lambda(lambda x: x, name='y'))
base_path = os.path.join(self.get_temp_dir(),
'keras_sequential_saved_model_save')
export_path = os.path.join(base_path, '00000123')
tf.saved_model.save(model, export_path)
_, model_server_address, _ = TensorflowModelServerTest.RunServer(
'default', base_path)
expected_version = self._GetModelVersion(base_path)
self.VerifyPredictRequest(
model_server_address,
batch_input=True,
specify_output=False,
expected_output=2.0,
expected_version=expected_version)
if __name__ == '__main__':
tf.enable_eager_execution()
tf.test.main()
| 38.630928 | 100 | 0.68774 |
d690fa555cba1fa477b4997c7181c30f43acdb97 | 232,091 | py | Python |
Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py | Nathan6e/content | c4c29c03a71308d73d3be0bccad8da10d44f6b5d | ["MIT"] | null | null | null |
Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py | Nathan6e/content | c4c29c03a71308d73d3be0bccad8da10d44f6b5d | ["MIT"] | null | null | null |
Packs/Base/Scripts/CommonServerPython/CommonServerPython_test.py | Nathan6e/content | c4c29c03a71308d73d3be0bccad8da10d44f6b5d | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
import demistomock as demisto
import copy
import json
import re
import os
import sys
import requests
from pytest import raises, mark
import pytest
import warnings
from CommonServerPython import set_to_integration_context_with_retries, xml2json, json2xml, entryTypes, formats, tableToMarkdown, underscoreToCamelCase, \
flattenCell, date_to_timestamp, datetime, camelize, pascalToSpace, argToList, \
remove_nulls_from_dictionary, is_error, get_error, hash_djb2, fileResult, is_ip_valid, get_demisto_version, \
IntegrationLogger, parse_date_string, IS_PY3, PY_VER_MINOR, DebugLogger, b64_encode, parse_date_range, return_outputs, \
argToBoolean, ipv4Regex, ipv4cidrRegex, ipv6cidrRegex, ipv6Regex, batch, FeedIndicatorType, \
encode_string_results, safe_load_json, remove_empty_elements, aws_table_to_markdown, is_demisto_version_ge, \
appendContext, auto_detect_indicator_type, handle_proxy, get_demisto_version_as_str, get_x_content_info_headers, \
url_to_clickable_markdown, WarningsHandler, DemistoException, SmartGetDict, JsonTransformer
import CommonServerPython
try:
from StringIO import StringIO
except ImportError:
# Python 3
from io import StringIO # noqa
INFO = {'b': 1,
'a': {
'safd': 3,
'b': [
{'c': {'d': 432}, 'd': 2},
{'c': {'f': 1}},
{'b': 1234},
{'c': {'d': 4567}},
{'c': {'d': 11}},
{'c': {'d': u'asdf'}}],
'c': {'d': 10},
}
}
@pytest.fixture()
def clear_version_cache():
"""
Clear the version cache at end of the test (in case we mocked demisto.serverVersion)
"""
yield
get_demisto_version._version = None
@pytest.fixture(autouse=True)
def handle_calling_context(mocker):
mocker.patch.object(CommonServerPython, 'get_integration_name', return_value='Test')
def test_xml():
import json
xml = b"<work><employee><id>100</id><name>foo</name></employee><employee><id>200</id><name>goo</name>" \
b"</employee></work>"
jsonExpected = '{"work": {"employee": [{"id": "100", "name": "foo"}, {"id": "200", "name": "goo"}]}}'
jsonActual = xml2json(xml)
assert jsonActual == jsonExpected, "expected\n" + jsonExpected + "\n to equal \n" + jsonActual
jsonDict = json.loads(jsonActual)
assert jsonDict['work']['employee'][0]['id'] == "100", 'id of first employee must be 100'
assert jsonDict['work']['employee'][1]['name'] == "goo", 'name of second employee must be goo'
xmlActual = json2xml(jsonActual)
assert xmlActual == xml, "expected:\n{}\nto equal:\n{}".format(xml, xmlActual)
def toEntry(table):
return {
'Type': entryTypes['note'],
'Contents': table,
'ContentsFormat': formats['table'],
'ReadableContentsFormat': formats['markdown'],
'HumanReadable': table
}
def test_is_ip_valid():
valid_ip_v6 = "FE80:0000:0000:0000:0202:B3FF:FE1E:8329"
valid_ip_v6_b = "FE80::0202:B3FF:FE1E:8329"
invalid_ip_v6 = "KKKK:0000:0000:0000:0202:B3FF:FE1E:8329"
valid_ip_v4 = "10.10.10.10"
invalid_ip_v4 = "10.10.10.9999"
invalid_not_ip_with_ip_structure = "1.1.1.1.1.1.1.1.1.1.1.1.1.1.1"
not_ip = "Demisto"
assert not is_ip_valid(valid_ip_v6)
assert is_ip_valid(valid_ip_v6, True)
assert is_ip_valid(valid_ip_v6_b, True)
assert not is_ip_valid(invalid_ip_v6, True)
assert not is_ip_valid(not_ip, True)
assert is_ip_valid(valid_ip_v4)
assert not is_ip_valid(invalid_ip_v4)
assert not is_ip_valid(invalid_not_ip_with_ip_structure)
DATA = [
{
'header_1': 'a1',
'header_2': 'b1',
'header_3': 'c1'
},
{
'header_1': 'a2',
'header_2': 'b2',
'header_3': 'c2'
},
{
'header_1': 'a3',
'header_2': 'b3',
'header_3': 'c3'
}
]
TABLE_TO_MARKDOWN_ONLY_DATA_PACK = [
(
DATA,
'''### tableToMarkdown test
|header_1|header_2|header_3|
|---|---|---|
| a1 | b1 | c1 |
| a2 | b2 | c2 |
| a3 | b3 | c3 |
'''
),
(
[
{
'header_1|with_pipe': 'a1',
'header_2': 'b1',
},
{
'header_1|with_pipe': 'a2',
'header_2': 'b2',
}
],
'''### tableToMarkdown test
|header_1\\|with_pipe|header_2|
|---|---|
| a1 | b1 |
| a2 | b2 |
'''
)
]
DATA_WITH_URLS = [(
[
{
'header_1': 'a1',
'url1': 'b1',
'url2': 'c1'
},
{
'header_1': 'a2',
'url1': 'b2',
'url2': 'c2'
},
{
'header_1': 'a3',
'url1': 'b3',
'url2': 'c3'
}
],
'''### tableToMarkdown test
|header_1|url1|url2|
|---|---|---|
| a1 | [b1](b1) | [c1](c1) |
| a2 | [b2](b2) | [c2](c2) |
| a3 | [b3](b3) | [c3](c3) |
'''
)]
COMPLEX_DATA_WITH_URLS = [(
[
{'data':
{'id': '1',
'result':
{'files':
[
{
'filename': 'name',
'size': 0,
'url': 'url'
}
]
},
'links': ['link']
}
},
{'data':
{'id': '2',
'result':
{'files':
[
{
'filename': 'name',
'size': 0,
'url': 'url'
}
]
},
'links': ['link']
}
}
],
[
{'data':
{'id': '1',
'result':
{'files':
[
{
'filename': 'name',
'size': 0,
'url': '[url](url)'
}
]
},
'links': ['[link](link)']
}
},
{'data':
{'id': '2',
'result':
{'files':
[
{
'filename': 'name',
'size': 0,
'url': '[url](url)'
}
]
},
'links': ['[link](link)']
}
}
])]
class TestTableToMarkdown:
@pytest.mark.parametrize('data, expected_table', TABLE_TO_MARKDOWN_ONLY_DATA_PACK)
def test_sanity(self, data, expected_table):
"""
Given:
- list of objects.
When:
- calling tableToMarkdown.
Then:
- return a valid table.
"""
table = tableToMarkdown('tableToMarkdown test', data)
assert table == expected_table
@staticmethod
def test_header_transform_underscoreToCamelCase():
"""
Given:
- list of objects.
- an header transformer.
When:
- calling tableToMarkdown.
Then:
- return a valid table with updated headers.
"""
# header transform
table = tableToMarkdown('tableToMarkdown test with headerTransform', DATA,
headerTransform=underscoreToCamelCase)
expected_table = (
'### tableToMarkdown test with headerTransform\n'
'|Header1|Header2|Header3|\n'
'|---|---|---|\n'
'| a1 | b1 | c1 |\n'
'| a2 | b2 | c2 |\n'
'| a3 | b3 | c3 |\n'
)
assert table == expected_table
@staticmethod
def test_multiline():
"""
Given:
- list of objects.
- some values contains a new line and the "|" sign.
When:
- calling tableToMarkdown.
Then:
- return a valid table with "br" tags instead of new lines and escaped pipe sign.
"""
data = copy.deepcopy(DATA)
for i, d in enumerate(data):
d['header_2'] = 'b%d.1\nb%d.2' % (i + 1, i + 1,)
d['header_3'] = 'c%d|1' % (i + 1,)
table = tableToMarkdown('tableToMarkdown test with multiline', data)
expected_table = (
'### tableToMarkdown test with multiline\n'
'|header_1|header_2|header_3|\n'
'|---|---|---|\n'
'| a1 | b1.1<br>b1.2 | c1\|1 |\n'
'| a2 | b2.1<br>b2.2 | c2\|1 |\n'
'| a3 | b3.1<br>b3.2 | c3\|1 |\n'
)
assert table == expected_table
@staticmethod
def test_url():
"""
Given:
- list of objects.
- some values contain a URL.
- some values are missing.
When:
- calling tableToMarkdown.
Then:
- return a valid table.
"""
data = copy.deepcopy(DATA)
for d in data:
d['header_2'] = None
d['header_3'] = '[url](https:\\demisto.com)'
table_url_missing_info = tableToMarkdown('tableToMarkdown test with url and missing info', data)
expected_table_url_missing_info = (
'### tableToMarkdown test with url and missing info\n'
'|header_1|header_2|header_3|\n'
'|---|---|---|\n'
'| a1 | | [url](https:\demisto.com) |\n'
'| a2 | | [url](https:\demisto.com) |\n'
'| a3 | | [url](https:\demisto.com) |\n'
)
assert table_url_missing_info == expected_table_url_missing_info
@staticmethod
def test_single_column():
"""
Given:
- list of objects.
- a single header.
When:
- calling tableToMarkdown.
Then:
- return a valid column style table.
"""
# single column table
table_single_column = tableToMarkdown('tableToMarkdown test with single column', DATA, ['header_1'])
expected_table_single_column = (
'### tableToMarkdown test with single column\n'
'|header_1|\n'
'|---|\n'
'| a1 |\n'
'| a2 |\n'
'| a3 |\n'
)
assert table_single_column == expected_table_single_column
@staticmethod
def test_list_values():
"""
Given:
- list of objects.
- some values are lists.
When:
- calling tableToMarkdown.
Then:
- return a valid table where the list values are comma-separated and each item in a new line.
"""
# list values
data = copy.deepcopy(DATA)
for i, d in enumerate(data):
d['header_3'] = [i + 1, 'second item']
d['header_2'] = 'hi'
table_list_field = tableToMarkdown('tableToMarkdown test with list field', data)
expected_table_list_field = (
'### tableToMarkdown test with list field\n'
'|header_1|header_2|header_3|\n'
'|---|---|---|\n'
'| a1 | hi | 1,<br>second item |\n'
'| a2 | hi | 2,<br>second item |\n'
'| a3 | hi | 3,<br>second item |\n'
)
assert table_list_field == expected_table_list_field
@staticmethod
def test_empty_fields():
"""
Given:
- list of objects.
- all values are empty.
When:
- calling tableToMarkdown with removeNull=false.
- calling tableToMarkdown with removeNull=true.
Then:
- return an empty table.
- return a "no results" message.
"""
data = [
{
'a': None,
'b': None,
'c': None,
} for _ in range(3)
]
table_all_none = tableToMarkdown('tableToMarkdown test with all none fields', data)
expected_table_all_none = (
'### tableToMarkdown test with all none fields\n'
'|a|b|c|\n'
'|---|---|---|\n'
'| | | |\n'
'| | | |\n'
'| | | |\n'
)
assert table_all_none == expected_table_all_none
# all fields are empty - removed
table_all_none2 = tableToMarkdown('tableToMarkdown test with all none fields2', data, removeNull=True)
expected_table_all_none2 = '''### tableToMarkdown test with all none fields2
**No entries.**
'''
assert table_all_none2 == expected_table_all_none2
@staticmethod
def test_header_not_on_first_object():
"""
Given:
- list of objects
- list of headers with header that doesn't appear in the first object.
When:
- calling tableToMarkdown.
Then:
- return a valid table with the extra header.
"""
# header not on first object
data = copy.deepcopy(DATA)
data[1]['extra_header'] = 'sample'
table_extra_header = tableToMarkdown('tableToMarkdown test with extra header', data,
headers=['header_1', 'header_2', 'extra_header'])
expected_table_extra_header = (
'### tableToMarkdown test with extra header\n'
'|header_1|header_2|extra_header|\n'
'|---|---|---|\n'
'| a1 | b1 | |\n'
'| a2 | b2 | sample |\n'
'| a3 | b3 | |\n'
)
assert table_extra_header == expected_table_extra_header
@staticmethod
def test_no_header():
"""
Given:
- list of objects.
- a list with non-existing headers.
When:
- calling tableToMarkdown.
Then:
- return a "no result" message.
"""
# no header
table_no_headers = tableToMarkdown('tableToMarkdown test with no headers', DATA,
headers=['no', 'header', 'found'], removeNull=True)
expected_table_no_headers = (
'### tableToMarkdown test with no headers\n'
'**No entries.**\n'
)
assert table_no_headers == expected_table_no_headers
@staticmethod
def test_dict_value():
"""
Given:
- list of objects.
- some values are lists.
When:
- calling tableToMarkdown.
Then:
- return a valid table.
"""
# dict value
data = copy.deepcopy(DATA)
data[1]['extra_header'] = {'sample': 'qwerty', 'sample2': '`asdf'}
table_dict_record = tableToMarkdown('tableToMarkdown test with dict record', data,
headers=['header_1', 'header_2', 'extra_header'])
expected_dict_record = (
'### tableToMarkdown test with dict record\n'
'|header_1|header_2|extra_header|\n'
'|---|---|---|\n'
'| a1 | b1 | |\n'
'| a2 | b2 | sample: qwerty<br>sample2: \\`asdf |\n'
'| a3 | b3 | |\n'
)
assert table_dict_record == expected_dict_record
@staticmethod
def test_string_header():
"""
Given:
- list of objects.
- a single header as a string.
When:
- calling tableToMarkdown.
Then:
- return a valid table.
"""
# string header (instead of list)
table_string_header = tableToMarkdown('tableToMarkdown string header', DATA, 'header_1')
expected_string_header_tbl = (
'### tableToMarkdown string header\n'
'|header_1|\n'
'|---|\n'
'| a1 |\n'
'| a2 |\n'
'| a3 |\n'
)
assert table_string_header == expected_string_header_tbl
@staticmethod
def test_list_of_strings_instead_of_dict():
"""
Given:
- list of strings.
- a single header as a list.
When:
- calling tableToMarkdown.
Then:
- return a valid table.
"""
# list of string values instead of list of dict objects
table_string_array = tableToMarkdown('tableToMarkdown test with string array', ['foo', 'bar', 'katz'],
['header_1'])
expected_string_array_tbl = (
'### tableToMarkdown test with string array\n'
'|header_1|\n'
'|---|\n'
'| foo |\n'
'| bar |\n'
'| katz |\n'
)
assert table_string_array == expected_string_array_tbl
@staticmethod
def test_list_of_strings_instead_of_dict_and_string_header():
"""
Given:
- list of strings.
- a single header as a string.
When:
- calling tableToMarkdown.
Then:
- return a valid table.
"""
# combination: string header + string values list
table_string_array_string_header = tableToMarkdown('tableToMarkdown test with string array and string header',
['foo', 'bar', 'katz'], 'header_1')
expected_string_array_string_header_tbl = (
'### tableToMarkdown test with string array and string header\n'
'|header_1|\n'
'|---|\n'
'| foo |\n'
'| bar |\n'
'| katz |\n'
)
assert table_string_array_string_header == expected_string_array_string_header_tbl
@staticmethod
def test_single_key_dict():
# combination: string header + string values list
table_single_key_dict = tableToMarkdown('tableToMarkdown test with single key dict',
{'single': ['Arthur', 'Blob', 'Cactus']})
expected_single_key_dict_tbl = (
'### tableToMarkdown test with single key dict\n'
'|single|\n'
'|---|\n'
'| Arthur |\n'
'| Blob |\n'
'| Cactus |\n'
)
assert table_single_key_dict == expected_single_key_dict_tbl
@staticmethod
def test_dict_with_special_character():
"""
When:
- calling tableToMarkdown.
Given:
- list of objects.
- some values contain special characters.
Then:
- return a valid table.
"""
data = {
'header_1': u'foo',
'header_2': [u'\xe2.rtf']
}
table_with_character = tableToMarkdown('tableToMarkdown test with special character', data)
expected_string_with_special_character = '''### tableToMarkdown test with special character
|header_1|header_2|
|---|---|
| foo | â.rtf |
'''
assert table_with_character == expected_string_with_special_character
@staticmethod
def test_title_with_special_character():
"""
When:
- calling tableToMarkdown.
Given:
- a title with a special character.
Then:
- return a valid table.
"""
data = {
'header_1': u'foo'
}
table_with_character = tableToMarkdown('tableToMarkdown test with special character Ù', data)
expected_string_with_special_character = (
'### tableToMarkdown test with special character Ù\n'
'|header_1|\n'
'|---|\n'
'| foo |\n'
)
assert table_with_character == expected_string_with_special_character
@pytest.mark.parametrize('data, expected_table', DATA_WITH_URLS)
def test_clickable_url(self, data, expected_table):
"""
Given:
- list of objects.
- some values are URLs.
When:
- calling tableToMarkdown.
Then:
- return a valid table with clickable URLs.
"""
table = tableToMarkdown('tableToMarkdown test', data, url_keys=['url1', 'url2'])
assert table == expected_table
@staticmethod
def test_keep_headers_list():
"""
Given:
- list of objects.
When:
- calling tableToMarkdown.
Then:
- return a valid table.
- the given headers list is not modified.
"""
headers = ['header_1', 'header_2']
data = {
'header_1': 'foo',
}
table = tableToMarkdown('tableToMarkdown test', data, removeNull=True, headers=headers)
assert 'header_2' not in table
assert headers == ['header_1', 'header_2']
@staticmethod
def test_date_fields_param():
"""
Given:
- List of objects with date fields in epoch format.
When:
- Calling tableToMarkdown with the given date fields.
Then:
- Return the date data in the markdown table in human-readable format.
"""
data = [
{
"docker_image": "demisto/python3",
"create_time": '1631521313466'
},
{
"docker_image": "demisto/python2",
"create_time": 1631521521466
}
]
table = tableToMarkdown('tableToMarkdown test', data, headers=["docker_image", "create_time"],
date_fields=['create_time'])
expected_md_table = '''### tableToMarkdown test
|docker_image|create_time|
|---|---|
| demisto/python3 | 2021-09-13 08:21:53 |
| demisto/python2 | 2021-09-13 08:25:21 |
'''
assert table == expected_md_table
@staticmethod
def test_with_json_transformers_default():
"""
Given:
- Nested json table.
When:
- Calling tableToMarkdown with `is_auto_transform_json` set to True.
Then:
- Parse the json table to the default format which supports nesting.
"""
with open('test_data/nested_data_example.json') as f:
nested_data_example = json.load(f)
table = tableToMarkdown("tableToMarkdown test", nested_data_example,
headers=['name', 'changelog', 'nested'],
is_auto_json_transform=True)
if IS_PY3:
expected_table = """### tableToMarkdown test
|name|changelog|nested|
|---|---|---|
| Active Directory Query | **1.0.4**:<br> ***path***: <br> ***releaseNotes***: <br>#### Integrations<br>##### Active Directory Query v2<br>Fixed an issue where the ***ad-get-user*** command caused performance issues because the *limit* argument was not defined.<br><br> ***displayName***: 1.0.4 - R124496<br> ***released***: 2020-09-23T17:43:26Z<br>**1.0.5**:<br> ***path***: <br> ***releaseNotes***: <br>#### Integrations<br>##### Active Directory Query v2<br>- Fixed several typos.<br>- Updated the Docker image to: *demisto/ldap:1.0.0.11282*.<br><br> ***displayName***: 1.0.5 - 132259<br> ***released***: 2020-10-01T17:48:31Z<br>**1.0.6**:<br> ***path***: <br> ***releaseNotes***: <br>#### Integrations<br>##### Active Directory Query v2<br>- Fixed an issue where the DN parameter within query in the ***search-computer*** command was incorrect.<br>- Updated the Docker image to *demisto/ldap:1.0.0.12410*.<br><br> ***displayName***: 1.0.6 - 151676<br> ***released***: 2020-10-19T14:35:15Z | **item1**:<br> ***a***: 1<br> ***b***: 2<br> ***c***: 3<br> ***d***: 4 |
"""
else:
expected_table = u"""### tableToMarkdown test
|name|changelog|nested|
|---|---|---|
| Active Directory Query | **1.0.4**:<br> ***path***: <br> ***releaseNotes***: <br>#### Integrations<br>##### Active Directory Query v2<br>Fixed an issue where the ***ad-get-user*** command caused performance issues because the *limit* argument was not defined.<br><br> ***displayName***: 1.0.4 - R124496<br> ***released***: 2020-09-23T17:43:26Z<br>**1.0.5**:<br> ***path***: <br> ***releaseNotes***: <br>#### Integrations<br>##### Active Directory Query v2<br>- Fixed several typos.<br>- Updated the Docker image to: *demisto/ldap:1.0.0.11282*.<br><br> ***displayName***: 1.0.5 - 132259<br> ***released***: 2020-10-01T17:48:31Z<br>**1.0.6**:<br> ***path***: <br> ***releaseNotes***: <br>#### Integrations<br>##### Active Directory Query v2<br>- Fixed an issue where the DN parameter within query in the ***search-computer*** command was incorrect.<br>- Updated the Docker image to *demisto/ldap:1.0.0.12410*.<br><br> ***displayName***: 1.0.6 - 151676<br> ***released***: 2020-10-19T14:35:15Z | **item1**:<br> ***a***: 1<br> ***c***: 3<br> ***b***: 2<br> ***d***: 4 |
"""
assert table == expected_table
@staticmethod
def test_with_json_transformer_simple():
with open('test_data/simple_data_example.json') as f:
simple_data_example = json.load(f)
name_transformer = JsonTransformer(keys=['first', 'second'])
json_transformer_mapping = {'name': name_transformer}
table = tableToMarkdown("tableToMarkdown test", simple_data_example,
json_transform_mapping=json_transformer_mapping)
if IS_PY3:
expected_table = """### tableToMarkdown test
|name|value|
|---|---|
| **first**:<br> ***a***: val<br><br>***second***: b | val1 |
| **first**:<br> ***a***: val2<br><br>***second***: d | val2 |
"""
else:
expected_table = u"""### tableToMarkdown test
|name|value|
|---|---|
| <br>***second***: b<br>**first**:<br> ***a***: val | val1 |
| <br>***second***: d<br>**first**:<br> ***a***: val2 | val2 |
"""
assert expected_table == table
@staticmethod
def test_with_json_transformer_nested():
"""
Given:
- Nested json table.
When:
- Calling tableToMarkdown with JsonTransformer with only `keys` given.
Then:
- The header key which is transformed will be parsed with the relevant keys.
"""
with open('test_data/nested_data_example.json') as f:
nested_data_example = json.load(f)
changelog_transformer = JsonTransformer(keys=['releaseNotes', 'released'], is_nested=True)
table_json_transformer = {'changelog': changelog_transformer}
table = tableToMarkdown("tableToMarkdown test", nested_data_example, headers=['name', 'changelog'],
json_transform_mapping=table_json_transformer)
expected_table = """### tableToMarkdown test
|name|changelog|
|---|---|
| Active Directory Query | **1.0.4**:<br> ***releaseNotes***: <br>#### Integrations<br>##### Active Directory Query v2<br>Fixed an issue where the ***ad-get-user*** command caused performance issues because the *limit* argument was not defined.<br><br> ***released***: 2020-09-23T17:43:26Z<br>**1.0.5**:<br> ***releaseNotes***: <br>#### Integrations<br>##### Active Directory Query v2<br>- Fixed several typos.<br>- Updated the Docker image to: *demisto/ldap:1.0.0.11282*.<br><br> ***released***: 2020-10-01T17:48:31Z<br>**1.0.6**:<br> ***releaseNotes***: <br>#### Integrations<br>##### Active Directory Query v2<br>- Fixed an issue where the DN parameter within query in the ***search-computer*** command was incorrect.<br>- Updated the Docker image to *demisto/ldap:1.0.0.12410*.<br><br> ***released***: 2020-10-19T14:35:15Z |
"""
assert expected_table == table
@staticmethod
def test_with_json_transformer_nested_complex():
"""
Given:
- Double nested json table.
When:
- Calling tableToMarkdown with JsonTransformer with only `keys_lst` given and `is_nested` set to True.
Then:
- The header key which is transformed will be parsed with the relevant keys.
"""
with open('test_data/complex_nested_data_example.json') as f:
complex_nested_data_example = json.load(f)
changelog_transformer = JsonTransformer(keys=['releaseNotes', 'c'], is_nested=True)
table_json_transformer = {'changelog': changelog_transformer}
table = tableToMarkdown('tableToMarkdown test', complex_nested_data_example, headers=['name', 'changelog'],
json_transform_mapping=table_json_transformer)
expected_table = """### tableToMarkdown test
|name|changelog|
|---|---|
| Active Directory Query | **1.0.4**:<br> **path**:<br> **a**:<br> **b**:<br> ***c***: we should see this value<br>**1.0.4**:<br> ***releaseNotes***: <br>#### Integrations<br>##### Active Directory Query v2<br>Fixed an issue where the ***ad-get-user*** command caused performance issues because the *limit* argument was not defined.<br><br>**1.0.5**:<br> **path**:<br> **a**:<br> **b**:<br> ***c***: we should see this value<br>**1.0.5**:<br> ***releaseNotes***: <br>#### Integrations<br>##### Active Directory Query v2<br>- Fixed several typos.<br>- Updated the Docker image to: *demisto/ldap:1.0.0.11282*.<br><br>**1.0.6**:<br> **path**:<br> **a**:<br> **b**:<br> ***c***: we should see this value<br>**1.0.6**:<br> ***releaseNotes***: <br>#### Integrations<br>##### Active Directory Query v2<br>- Fixed an issue where the DN parameter within query in the ***search-computer*** command was incorrect.<br>- Updated the Docker image to *demisto/ldap:1.0.0.12410*.<br> |
"""
assert expected_table == table
@staticmethod
def test_with_json_transformer_func():
def changelog_to_str(json_input):
return ', '.join(json_input.keys())
with open('test_data/nested_data_example.json') as f:
nested_data_example = json.load(f)
changelog_transformer = JsonTransformer(func=changelog_to_str)
table_json_transformer = {'changelog': changelog_transformer}
table = tableToMarkdown("tableToMarkdown test", nested_data_example, headers=['name', 'changelog'],
json_transform_mapping=table_json_transformer)
expected_table = """### tableToMarkdown test
|name|changelog|
|---|---|
| Active Directory Query | 1.0.4, 1.0.5, 1.0.6 |
"""
assert expected_table == table
@pytest.mark.parametrize('data, expected_data', COMPLEX_DATA_WITH_URLS)
def test_url_to_clickable_markdown(data, expected_data):
table = url_to_clickable_markdown(data, url_keys=['url', 'links'])
assert table == expected_data
def test_flatten_cell():
# sanity
utf8_to_flatten = b'abcdefghijklmnopqrstuvwxyz1234567890!'.decode('utf8')
flatten_text = flattenCell(utf8_to_flatten)
expected_string = 'abcdefghijklmnopqrstuvwxyz1234567890!'
assert flatten_text == expected_string
# list of bytes and utf8 strings to flatten
str_a = b'abcdefghijklmnopqrstuvwxyz1234567890!'
utf8_b = str_a.decode('utf8')
list_to_flatten = [str_a, utf8_b]
flatten_text2 = flattenCell(list_to_flatten)
expected_flatten_string = 'abcdefghijklmnopqrstuvwxyz1234567890!,\nabcdefghijklmnopqrstuvwxyz1234567890!'
assert flatten_text2 == expected_flatten_string
# special character test
special_char = u'会'
list_of_special = [special_char, special_char]
flattenCell(list_of_special)
flattenCell(special_char)
# dictionary test
dict_to_flatten = {'first': u'会'}
expected_flatten_dict = u'{\n "first": "\u4f1a"\n}'
assert flattenCell(dict_to_flatten) == expected_flatten_dict
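# Brief summary of the behaviour exercised above (inferred from these assertions rather than
# from the flattenCell implementation): scalar bytes/unicode values come back as text, list
# items are joined with ",\n", and dicts are pretty-printed as JSON.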
def test_hash_djb2():
assert hash_djb2("test") == 2090756197, "Invalid value of hash_djb2"
def test_camelize():
non_camalized = [{'chookity_bop': 'asdasd'}, {'ab_c': 'd e', 'fgh_ijk': 'lm', 'nop': 'qr_st'}]
expected_output_upper_camel = [{'ChookityBop': 'asdasd'}, {'AbC': 'd e', 'Nop': 'qr_st', 'FghIjk': 'lm'}]
expected_output_lower_camel = [{'chookityBop': 'asdasd'}, {'abC': 'd e', 'nop': 'qr_st', 'fghIjk': 'lm'}]
assert camelize(non_camalized, '_') == expected_output_upper_camel
assert camelize(non_camalized, '_', upper_camel=True) == expected_output_upper_camel
assert camelize(non_camalized, '_', upper_camel=False) == expected_output_lower_camel
non_camalized2 = {'ab_c': 'd e', 'fgh_ijk': 'lm', 'nop': 'qr_st'}
expected_output2_upper_camel = {'AbC': 'd e', 'Nop': 'qr_st', 'FghIjk': 'lm'}
expected_output2_lower_camel = {'abC': 'd e', 'nop': 'qr_st', 'fghIjk': 'lm'}
assert camelize(non_camalized2, '_') == expected_output2_upper_camel
assert camelize(non_camalized2, '_', upper_camel=True) == expected_output2_upper_camel
assert camelize(non_camalized2, '_', upper_camel=False) == expected_output2_lower_camel
def test_camelize_string():
from CommonServerPython import camelize_string
non_camalized = ['chookity_bop', 'ab_c', 'fgh_ijk', 'nop']
expected_output_upper_camel = ['ChookityBop', 'AbC', 'FghIjk', 'Nop']
expected_output_lower_camel = ['chookityBop', 'abC', 'fghIjk', 'nop']
for i in range(len(non_camalized)):
assert camelize_string(non_camalized[i], '_') == expected_output_upper_camel[i]
assert camelize_string(non_camalized[i], '_', upper_camel=True) == expected_output_upper_camel[i]
assert camelize_string(non_camalized[i], '_', upper_camel=False) == expected_output_lower_camel[i]
def test_underscoreToCamelCase():
from CommonServerPython import underscoreToCamelCase
non_camalized = ['chookity_bop', 'ab_c', 'fgh_ijk', 'nop']
expected_output_upper_camel = ['ChookityBop', 'AbC', 'FghIjk', 'Nop']
expected_output_lower_camel = ['chookityBop', 'abC', 'fghIjk', 'nop']
for i in range(len(non_camalized)):
assert underscoreToCamelCase(non_camalized[i]) == expected_output_upper_camel[i]
assert underscoreToCamelCase(non_camalized[i], upper_camel=True) == expected_output_upper_camel[i]
assert underscoreToCamelCase(non_camalized[i], upper_camel=False) == expected_output_lower_camel[i]
# Note this test will fail when run locally (in pycharm/vscode) as it assumes the machine (docker image) has UTC timezone set
def test_date_to_timestamp():
assert date_to_timestamp('2018-11-06T08:56:41') == 1541494601000
assert date_to_timestamp(datetime.strptime('2018-11-06T08:56:41', "%Y-%m-%dT%H:%M:%S")) == 1541494601000
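# Worked arithmetic for the expected value (assuming a UTC environment, as noted above):
# 2018-11-06T08:56:41 UTC is 1541494601 seconds after the Unix epoch, and date_to_timestamp
# returns milliseconds, hence 1541494601 * 1000 == 1541494601000.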
PASCAL_TO_SPACE_USE_CASES = [
('Validate', 'Validate'),
('validate', 'Validate'),
('TCP', 'TCP'),
('eventType', 'Event Type'),
('eventID', 'Event ID'),
('eventId', 'Event Id'),
('IPAddress', 'IP Address'),
('isDisabled', 'Is Disabled'),
('device-group', 'Device - Group'),
]
@pytest.mark.parametrize('s, expected', PASCAL_TO_SPACE_USE_CASES)
def test_pascalToSpace(s, expected):
assert pascalToSpace(s) == expected, 'Error on {} != {}'.format(pascalToSpace(s), expected)
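# Conversion rules observed in PASCAL_TO_SPACE_USE_CASES (inferred from the cases, not from
# the implementation): words are split on lower-to-upper transitions and on acronym
# boundaries (e.g. 'IPAddress' -> 'IP Address'), the first letter is capitalized, all-caps
# tokens such as 'TCP' are left intact, and '-' is padded to ' - '.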
def test_safe_load_json():
valid_json_str = '{"foo": "bar"}'
expected_valid_json_result = {u'foo': u'bar'}
assert expected_valid_json_result == safe_load_json(valid_json_str)
def test_remove_empty_elements():
test_dict = {
"foo": "bar",
"baz": {},
"empty": [],
"nested_dict": {
"empty_list": [],
"hummus": "pita"
},
"nested_list": {
"more_empty_list": []
}
}
expected_result = {
"foo": "bar",
"nested_dict": {
"hummus": "pita"
}
}
assert expected_result == remove_empty_elements(test_dict)
@pytest.mark.parametrize('header,raw_input,expected_output', [
('AWS DynamoDB DescribeBackup', {
'BackupDescription': {
"Foo": "Bar",
"Baz": "Bang",
"TestKey": "TestValue"
}
}, '''### AWS DynamoDB DescribeBackup\n|Baz|Foo|TestKey|\n|---|---|---|\n| Bang | Bar | TestValue |\n'''),
('Empty Results', {'key': []}, '### Empty Results\n**No entries.**\n')
])
def test_aws_table_to_markdown(header, raw_input, expected_output):
"""
Given
- A header and a dict with two levels
- A header and a dict with one key pointing to an empty list
When
- Creating a markdown table using the aws_table_to_markdown function
Ensure
- The header appears as a markdown header and the dictionary is translated to a markdown table
- The header appears as a markdown header and "No entries" text appears instead of a markdown table
"""
assert aws_table_to_markdown(raw_input, header) == expected_output
def test_argToList():
expected = ['a', 'b', 'c']
test1 = ['a', 'b', 'c']
test2 = 'a,b,c'
test3 = '["a","b","c"]'
test4 = 'a;b;c'
test5 = 1
test6 = '1'
test7 = True
results = [argToList(test1), argToList(test2), argToList(test2, ','), argToList(test3), argToList(test4, ';')]
for result in results:
assert expected == result, 'argToList test failed, {} is not equal to {}'.format(str(result), str(expected))
assert argToList(test5) == [1]
assert argToList(test6) == ['1']
assert argToList(test7) == [True]
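# Behaviour covered above (as exercised by these cases): lists pass through unchanged,
# strings are split on the separator (',' by default, ';' when given), JSON-style list
# strings are parsed, and non-string scalars are wrapped in a single-item list.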
def test_remove_nulls():
temp_dictionary = {"a": "b", "c": 4, "e": [], "f": {}, "g": None, "h": "", "i": [1], "k": ()}
expected_dictionary = {"a": "b", "c": 4, "i": [1]}
remove_nulls_from_dictionary(temp_dictionary)
assert expected_dictionary == temp_dictionary, \
"remove_nulls_from_dictionary test failed, {} is not equal to {}".format(str(temp_dictionary),
str(expected_dictionary))
def test_is_error_true():
execute_command_results = [
{
"Type": entryTypes["error"],
"ContentsFormat": formats["text"],
"Contents": "this is error message"
}
]
assert is_error(execute_command_results)
def test_is_error_none():
assert not is_error(None)
def test_is_error_single_entry():
execute_command_results = {
"Type": entryTypes["error"],
"ContentsFormat": formats["text"],
"Contents": "this is error message"
}
assert is_error(execute_command_results)
def test_is_error_false():
execute_command_results = [
{
"Type": entryTypes["note"],
"ContentsFormat": formats["text"],
"Contents": "this is regular note"
}
]
assert not is_error(execute_command_results)
def test_not_error_entry():
execute_command_results = "invalid command results as string"
assert not is_error(execute_command_results)
def test_get_error():
execute_command_results = [
{
"Type": entryTypes["error"],
"ContentsFormat": formats["text"],
"Contents": "this is error message"
}
]
error = get_error(execute_command_results)
assert error == "this is error message"
def test_get_error_single_entry():
execute_command_results = {
"Type": entryTypes["error"],
"ContentsFormat": formats["text"],
"Contents": "this is error message"
}
error = get_error(execute_command_results)
assert error == "this is error message"
def test_get_error_need_raise_error_on_non_error_input():
execute_command_results = [
{
"Type": entryTypes["note"],
"ContentsFormat": formats["text"],
"Contents": "this is not an error"
}
]
try:
get_error(execute_command_results)
except ValueError as exception:
assert "execute_command_result has no error entry. before using get_error use is_error" in str(exception)
return
assert False
@mark.parametrize('data,data_expected', [
("this is a test", b"this is a test"),
(u"עברית", u"עברית".encode('utf-8')),
(b"binary data\x15\x00", b"binary data\x15\x00"),
]) # noqa: E124
def test_fileResult(mocker, request, data, data_expected):
mocker.patch.object(demisto, 'uniqueFile', return_value="test_file_result")
mocker.patch.object(demisto, 'investigation', return_value={'id': '1'})
file_name = "1_test_file_result"
def cleanup():
try:
os.remove(file_name)
except OSError:
pass
request.addfinalizer(cleanup)
res = fileResult("test.txt", data)
assert res['File'] == "test.txt"
with open(file_name, 'rb') as f:
assert f.read() == data_expected
# Error whose str representation is always a unicode string
class SpecialErr(Exception):
def __str__(self):
return u"מיוחד"
def test_logger():
from CommonServerPython import LOG
LOG(u'€')
LOG(Exception(u'€'))
LOG(SpecialErr(12))
def test_logger_write(mocker):
mocker.patch.object(demisto, 'params', return_value={
'credentials': {'password': 'my_password'},
})
mocker.patch.object(demisto, 'info')
ilog = IntegrationLogger()
ilog.write("This is a test with my_password")
ilog.print_log()
# assert that the print doesn't contain my_password
# call_args is tuple (args list, kwargs). we only need the args
args = demisto.info.call_args[0]
assert 'This is a test' in args[0]
assert 'my_password' not in args[0]
assert '<XX_REPLACED>' in args[0]
def test_logger_init_key_name(mocker):
mocker.patch.object(demisto, 'params', return_value={
'key': {'password': 'my_password'},
'secret': 'my_secret'
})
mocker.patch.object(demisto, 'info')
ilog = IntegrationLogger()
ilog.write("This is a test with my_password and my_secret")
ilog.print_log()
# assert that the print doesn't contain my_password
# call_args is tuple (args list, kwargs). we only need the args
args = demisto.info.call_args[0]
assert 'This is a test' in args[0]
assert 'my_password' not in args[0]
assert 'my_secret' not in args[0]
assert '<XX_REPLACED>' in args[0]
def test_logger_replace_strs(mocker):
mocker.patch.object(demisto, 'params', return_value={
'apikey': 'my_apikey',
})
ilog = IntegrationLogger()
ilog.add_replace_strs('special_str', 'ZAQ!@#$%&*', '') # also check that empty string is not added by mistake
ilog('my_apikey is special_str and b64: ' + b64_encode('my_apikey'))
ilog('special chars like ZAQ!@#$%&* should be replaced even when url-encoded like ZAQ%21%40%23%24%25%26%2A')
assert ('' not in ilog.replace_strs)
assert ilog.messages[0] == '<XX_REPLACED> is <XX_REPLACED> and b64: <XX_REPLACED>'
assert ilog.messages[1] == \
'special chars like <XX_REPLACED> should be replaced even when url-encoded like <XX_REPLACED>'
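# Note on the assertions above: add_replace_strs masks not only the literal secrets but also
# their base64-encoded and URL-encoded forms, which is why every occurrence in both messages
# is rewritten to '<XX_REPLACED>' (inferred from this test's expectations).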
TEST_SSH_KEY_ESC = '-----BEGIN OPENSSH PRIVATE KEY-----\\nb3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAACFw' \
'AAAAdzc2gtcn\\n-----END OPENSSH PRIVATE KEY-----'
TEST_SSH_KEY = '-----BEGIN OPENSSH PRIVATE KEY-----\nb3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAACFw' \
'AAAAdzc2gtcn\n-----END OPENSSH PRIVATE KEY-----'
TEST_PASS_JSON_CHARS = 'json_chars'
SENSITIVE_PARAM = {
'app': None,
'authentication': {
'credential': '',
'credentials': {
'id': '',
'locked': False,
'modified': '0001-01-01T00: 00: 00Z',
'name': '',
'password': 'cred_pass',
'sortValues': None,
'sshkey': TEST_SSH_KEY,
'sshkeyEsc': TEST_SSH_KEY_ESC,
'sshkeyPass': 'ssh_key_secret_pass',
'user': '',
'vaultInstanceId': '',
'version': 0,
'workgroup': ''
},
'identifier': 'admin',
'password': 'ident_pass',
'passwordChanged': False
},
'password': TEST_PASS_JSON_CHARS + '\\"',
}
def test_logger_replace_strs_credentials(mocker):
mocker.patch.object(demisto, 'params', return_value=SENSITIVE_PARAM)
basic_auth = b64_encode(
'{}:{}'.format(SENSITIVE_PARAM['authentication']['identifier'], SENSITIVE_PARAM['authentication']['password']))
ilog = IntegrationLogger()
# log some secrets
ilog('my cred pass: cred_pass. my ssh key: ssh_key_secret. my ssh key: {}.'
'my ssh key: {}. my ssh pass: ssh_key_secret_pass. ident: ident_pass.'
' basic auth: {}'.format(TEST_SSH_KEY, TEST_SSH_KEY_ESC, basic_auth))
for s in ('cred_pass', TEST_SSH_KEY, TEST_SSH_KEY_ESC, 'ssh_key_secret_pass', 'ident_pass', basic_auth):
assert s not in ilog.messages[0]
def test_debug_logger_replace_strs(mocker):
mocker.patch.object(demisto, 'params', return_value=SENSITIVE_PARAM)
debug_logger = DebugLogger()
debug_logger.int_logger.set_buffering(True)
debug_logger.log_start_debug()
msg = debug_logger.int_logger.messages[0]
assert 'debug-mode started' in msg
assert 'Params:' in msg
for s in ('cred_pass', 'ssh_key_secret', 'ssh_key_secret_pass', 'ident_pass', TEST_SSH_KEY,
TEST_SSH_KEY_ESC, TEST_PASS_JSON_CHARS):
assert s not in msg
def test_build_curl_post_noproxy():
"""
Given:
- HTTP client log messages of POST query
- Proxy is not used and insecure is not checked
When
- Building curl query
Then
- Ensure curl is generated as expected
"""
ilog = IntegrationLogger()
ilog.build_curl("send: b'POST /api HTTP/1.1\\r\\n"
"Host: demisto.com\\r\\n"
"User-Agent: python-requests/2.25.0\\r\\n"
"Accept-Encoding: gzip, deflate\r\n"
"Accept: */*\\r\\n"
"Connection: keep-alive\\r\\n"
"Authorization: TOKEN\\r\\n"
"Content-Length: 57\\r\\n"
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b'{\"data\": \"value\"}'")
assert ilog.curl == [
'curl -X POST https://demisto.com/api -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--noproxy "*" -d \'{"data": "value"}\''
]
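# A short note on the flow exercised here (inferred from the calls above): build_curl is fed
# the http.client debug "send:" lines, first the request line plus headers and then the body,
# and accumulates the reconstructed command(s) on IntegrationLogger.curl.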
def test_build_curl_post_xml():
"""
Given:
- HTTP client log messages of POST query with XML body
- Proxy is not used and insecure is not checked
When
- Building curl query
Then
- Ensure curl is generated as expected
"""
ilog = IntegrationLogger()
ilog.build_curl("send: b'POST /api HTTP/1.1\\r\\n"
"Host: demisto.com\\r\\n"
"User-Agent: python-requests/2.25.0\\r\\n"
"Accept-Encoding: gzip, deflate\r\n"
"Accept: */*\\r\\n"
"Connection: keep-alive\\r\\n"
"Authorization: TOKEN\\r\\n"
"Content-Length: 57\\r\\n"
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b'<?xml version=\"1.0\" encoding=\"utf-8\"?>'")
assert ilog.curl == [
'curl -X POST https://demisto.com/api -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--noproxy "*" -d \'<?xml version="1.0" encoding="utf-8"?>\''
]
def test_build_curl_get_withproxy(mocker):
"""
Given:
- HTTP client log messages of GET query
- Proxy used and insecure checked
When
- Building curl query
Then
- Ensure curl is generated as expected
"""
mocker.patch.object(demisto, 'params', return_value={
'proxy': True,
'insecure': True
})
os.environ['https_proxy'] = 'http://proxy'
ilog = IntegrationLogger()
ilog.build_curl("send: b'GET /api HTTP/1.1\\r\\n"
"Host: demisto.com\\r\\n"
"User-Agent: python-requests/2.25.0\\r\\n"
"Accept-Encoding: gzip, deflate\r\n"
"Accept: */*\\r\\n"
"Connection: keep-alive\\r\\n"
"Authorization: TOKEN\\r\\n"
"Content-Length: 57\\r\\n"
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b'{\"data\": \"value\"}'")
assert ilog.curl == [
'curl -X GET https://demisto.com/api -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--proxy http://proxy -k -d \'{"data": "value"}\''
]
def test_build_curl_multiple_queries():
"""
Given:
- HTTP client log messages of POST and GET queries
- Proxy is not used and insecure is not checked
When
- Building curl query
Then
- Ensure two curl queries are generated as expected
"""
ilog = IntegrationLogger()
ilog.build_curl("send: b'POST /api/post HTTP/1.1\\r\\n"
"Host: demisto.com\\r\\n"
"User-Agent: python-requests/2.25.0\\r\\n"
"Accept-Encoding: gzip, deflate\r\n"
"Accept: */*\\r\\n"
"Connection: keep-alive\\r\\n"
"Authorization: TOKEN\\r\\n"
"Content-Length: 57\\r\\n"
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b'{\"postdata\": \"value\"}'")
ilog.build_curl("send: b'GET /api/get HTTP/1.1\\r\\n"
"Host: demisto.com\\r\\n"
"User-Agent: python-requests/2.25.0\\r\\n"
"Accept-Encoding: gzip, deflate\r\n"
"Accept: */*\\r\\n"
"Connection: keep-alive\\r\\n"
"Authorization: TOKEN\\r\\n"
"Content-Length: 57\\r\\n"
"Content-Type: application/json\\r\\n\\r\\n'")
ilog.build_curl("send: b'{\"getdata\": \"value\"}'")
assert ilog.curl == [
'curl -X POST https://demisto.com/api/post -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--noproxy "*" -d \'{"postdata": "value"}\'',
'curl -X GET https://demisto.com/api/get -H "Authorization: TOKEN" -H "Content-Type: application/json" '
'--noproxy "*" -d \'{"getdata": "value"}\''
]
def test_is_mac_address():
from CommonServerPython import is_mac_address
mac_address_false = 'AA:BB:CC:00:11'
mac_address_true = 'AA:BB:CC:00:11:22'
assert (is_mac_address(mac_address_false) is False)
assert (is_mac_address(mac_address_true))
def test_return_error_command(mocker):
from CommonServerPython import return_error
err_msg = "Testing unicode Ё"
outputs = {'output': 'error'}
expected_error = {
'Type': entryTypes['error'],
'ContentsFormat': formats['text'],
'Contents': err_msg,
"EntryContext": outputs
}
# Test command that is not fetch-incidents
mocker.patch.object(demisto, 'command', return_value="test-command")
mocker.patch.object(sys, 'exit')
mocker.spy(demisto, 'results')
return_error(err_msg, '', outputs)
assert str(demisto.results.call_args) == "call({})".format(expected_error)
def test_return_error_fetch_incidents(mocker):
from CommonServerPython import return_error
err_msg = "Testing unicode Ё"
# Test fetch-incidents
mocker.patch.object(demisto, 'command', return_value="fetch-incidents")
returned_error = False
try:
return_error(err_msg)
except Exception as e:
returned_error = True
assert str(e) == err_msg
assert returned_error
def test_return_error_fetch_credentials(mocker):
from CommonServerPython import return_error
err_msg = "Testing unicode Ё"
# Test fetch-credentials
mocker.patch.object(demisto, 'command', return_value="fetch-credentials")
returned_error = False
try:
return_error(err_msg)
except Exception as e:
returned_error = True
assert str(e) == err_msg
assert returned_error
def test_return_error_fetch_indicators(mocker):
from CommonServerPython import return_error
err_msg = "Testing unicode Ё"
# Test fetch-indicators
mocker.patch.object(demisto, 'command', return_value="fetch-indicators")
returned_error = False
try:
return_error(err_msg)
except Exception as e:
returned_error = True
assert str(e) == err_msg
assert returned_error
def test_return_error_long_running_execution(mocker):
from CommonServerPython import return_error
err_msg = "Testing unicode Ё"
# Test long-running-execution
mocker.patch.object(demisto, 'command', return_value="long-running-execution")
returned_error = False
try:
return_error(err_msg)
except Exception as e:
returned_error = True
assert str(e) == err_msg
assert returned_error
def test_return_error_script(mocker, monkeypatch):
from CommonServerPython import return_error
mocker.patch.object(sys, 'exit')
mocker.spy(demisto, 'results')
monkeypatch.delattr(demisto, 'command')
err_msg = "Testing unicode Ё"
outputs = {'output': 'error'}
expected_error = {
'Type': entryTypes['error'],
'ContentsFormat': formats['text'],
'Contents': err_msg,
"EntryContext": outputs
}
assert not hasattr(demisto, 'command')
return_error(err_msg, '', outputs)
assert str(demisto.results.call_args) == "call({})".format(expected_error)
def test_exception_in_return_error(mocker):
from CommonServerPython import return_error, IntegrationLogger
expected = {'EntryContext': None, 'Type': 4, 'ContentsFormat': 'text', 'Contents': 'Message'}
mocker.patch.object(demisto, 'results')
mocker.patch.object(IntegrationLogger, '__call__', return_value='Message')
with raises(SystemExit, match='0'):
return_error("Message", error=ValueError("Error!"))
results = demisto.results.call_args[0][0]
assert expected == results
# IntegrationLogger.__call__ (i.e. LOG) is called twice when an exception is supplied
assert IntegrationLogger.__call__.call_count == 2
def test_return_error_get_modified_remote_data(mocker):
from CommonServerPython import return_error
mocker.patch.object(demisto, 'command', return_value='get-modified-remote-data')
mocker.patch.object(demisto, 'results')
err_msg = 'Test Error'
with raises(SystemExit):
return_error(err_msg)
assert demisto.results.call_args[0][0]['Contents'] == 'skip update. error: ' + err_msg
def test_return_error_get_modified_remote_data_not_implemented(mocker):
from CommonServerPython import return_error
mocker.patch.object(demisto, 'command', return_value='get-modified-remote-data')
mocker.patch.object(demisto, 'results')
err_msg = 'Test Error'
with raises(SystemExit):
try:
raise NotImplementedError('Command not implemented')
except:
return_error(err_msg)
assert demisto.results.call_args[0][0]['Contents'] == err_msg
def test_indicator_type_by_server_version_under_6_1(mocker, clear_version_cache):
"""
Given
- demisto version mock under 6.2
When
- demisto version mock under 6.2
Then
- Do not remove the STIX indicator type prefix.
"""
mocker.patch.object(
demisto,
'demistoVersion',
return_value={
'version': '6.1.0',
}
)
assert FeedIndicatorType.indicator_type_by_server_version("STIX Attack Pattern") == "STIX Attack Pattern"
def test_indicator_type_by_server_version_6_2(mocker, clear_version_cache):
"""
Given
- demisto version mock set to 6.2
When
- demisto version mock set to 6.2
Then
- Return the STIX indicator type with the STIX prefix
"""
mocker.patch.object(
demisto,
'demistoVersion',
return_value={
'version': '6.2.0',
}
)
assert FeedIndicatorType.indicator_type_by_server_version("STIX Attack Pattern") == "Attack Pattern"
def test_assign_params():
from CommonServerPython import assign_params
res = assign_params(a='1', b=True, c=None, d='')
assert res == {'a': '1', 'b': True}
class TestBuildDBotEntry(object):
def test_build_dbot_entry(self):
from CommonServerPython import build_dbot_entry
res = build_dbot_entry('user@example.com', 'Email', 'Vendor', 1)
assert res == {'DBotScore': {'Indicator': 'user@example.com', 'Type': 'email', 'Vendor': 'Vendor', 'Score': 1}}
def test_build_dbot_entry_no_malicious(self):
from CommonServerPython import build_dbot_entry
res = build_dbot_entry('user@example.com', 'Email', 'Vendor', 3, build_malicious=False)
assert res == {'DBotScore': {'Indicator': 'user@example.com', 'Type': 'email', 'Vendor': 'Vendor', 'Score': 3}}
def test_build_dbot_entry_malicious(self):
from CommonServerPython import build_dbot_entry, outputPaths
res = build_dbot_entry('user@example.com', 'Email', 'Vendor', 3, 'Malicious email')
assert res == {
"DBotScore": {
"Vendor": "Vendor",
"Indicator": "user@example.com",
"Score": 3,
"Type": "email"
},
outputPaths['email']: {
"Malicious": {
"Vendor": "Vendor",
"Description": "Malicious email"
},
"Address": "user@example.com"
}
}
def test_build_malicious_dbot_entry_file(self):
from CommonServerPython import build_malicious_dbot_entry, outputPaths
res = build_malicious_dbot_entry('md5hash', 'MD5', 'Vendor', 'Google DNS')
assert res == {
outputPaths['file']:
{"Malicious": {"Vendor": "Vendor", "Description": "Google DNS"}, "MD5": "md5hash"}}
def test_build_malicious_dbot_entry(self):
from CommonServerPython import build_malicious_dbot_entry, outputPaths
res = build_malicious_dbot_entry('8.8.8.8', 'ip', 'Vendor', 'Google DNS')
assert res == {outputPaths['ip']: {
'Address': '8.8.8.8', 'Malicious': {'Vendor': 'Vendor', 'Description': 'Google DNS'}}}
def test_build_malicious_dbot_entry_wrong_indicator_type(self):
from CommonServerPython import build_malicious_dbot_entry, DemistoException
with raises(DemistoException, match='Wrong indicator type'):
build_malicious_dbot_entry('8.8.8.8', 'notindicator', 'Vendor', 'Google DNS')
def test_illegal_dbot_score(self):
from CommonServerPython import build_dbot_entry, DemistoException
with raises(DemistoException, match='illegal DBot score'):
build_dbot_entry('1', 'ip', 'Vendor', 8)
def test_illegal_indicator_type(self):
from CommonServerPython import build_dbot_entry, DemistoException
with raises(DemistoException, match='illegal indicator type'):
build_dbot_entry('1', 'NOTHING', 'Vendor', 2)
def test_file_indicators(self):
from CommonServerPython import build_dbot_entry, outputPaths
res = build_dbot_entry('md5hash', 'md5', 'Vendor', 3)
assert res == {
"DBotScore": {
"Indicator": "md5hash",
"Type": "file",
"Vendor": "Vendor",
"Score": 3
},
outputPaths['file']: {
"MD5": "md5hash",
"Malicious": {
"Vendor": "Vendor",
"Description": None
}
}
}
class TestCommandResults:
def test_outputs_without_outputs_prefix(self):
"""
Given
- outputs as a list without outputs_prefix
When
- Returning results
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import CommandResults
with pytest.raises(ValueError, match='outputs_prefix'):
CommandResults(outputs=[])
def test_dbot_score_is_in_to_context_ip(self):
"""
Given
- IP indicator
When
- Creating a reputation
Then
- Validate the DBOT Score and IP output exists in entry context.
"""
from CommonServerPython import Common, DBotScoreType, CommandResults
indicator_id = '1.1.1.1'
raw_response = {'id': indicator_id}
indicator = Common.IP(
indicator_id,
dbot_score=Common.DBotScore(
indicator_id,
DBotScoreType.IP,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
)
entry_context = CommandResults(
indicator=indicator,
readable_output='Indicator!',
outputs={'Indicator': raw_response},
raw_response=raw_response
).to_context()['EntryContext']
assert Common.DBotScore.CONTEXT_PATH in entry_context
assert Common.IP.CONTEXT_PATH in entry_context
def test_dbot_score_is_in_to_context_file(self):
"""
Given
- File indicator
When
- Creating a reputation
Then
- Validate the DBOT Score and File output exists in entry context.
"""
from CommonServerPython import Common, DBotScoreType, CommandResults
indicator_id = '63347f5d946164a23faca26b78a91e1c'
raw_response = {'id': indicator_id}
indicator = Common.File(
md5=indicator_id,
dbot_score=Common.DBotScore(
indicator_id,
DBotScoreType.FILE,
'Indicator',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
)
entry_context = CommandResults(
indicator=indicator,
readable_output='output!',
outputs={'Indicator': raw_response},
raw_response=raw_response
).to_context()['EntryContext']
assert Common.DBotScore.CONTEXT_PATH in entry_context
assert Common.File.CONTEXT_PATH in entry_context
def test_dbot_score_is_in_to_context_domain(self):
"""
Given
- domain indicator
When
- Creating a reputation
Then
- Validate the DBOT Score and Domain output exists in entry context.
"""
from CommonServerPython import Common, DBotScoreType, CommandResults
indicator_id = 'example.com'
raw_response = {'id': indicator_id}
indicator = Common.Domain(
indicator_id,
dbot_score=Common.DBotScore(
indicator_id,
DBotScoreType.DOMAIN,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
)
entry_context = CommandResults(
indicator=indicator,
readable_output='output!',
outputs={'Indicator': raw_response},
raw_response=raw_response
).to_context()['EntryContext']
assert Common.DBotScore.CONTEXT_PATH in entry_context
assert Common.Domain.CONTEXT_PATH in entry_context
def test_dbot_score_is_in_to_context_url(self):
"""
Given
- URL indicator
When
- Creating a reputation
Then
- Validate the DBOT Score and URL output exists in entry context.
"""
from CommonServerPython import Common, DBotScoreType, CommandResults
indicator_id = 'https://example.com'
raw_response = {'id': indicator_id}
indicator = Common.URL(
indicator_id,
dbot_score=Common.DBotScore(
indicator_id,
DBotScoreType.URL,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
)
entry_context = CommandResults(
indicator=indicator,
readable_output='output!',
outputs={'Indicator': raw_response},
raw_response=raw_response
).to_context()['EntryContext']
assert Common.DBotScore.CONTEXT_PATH in entry_context
assert Common.URL.CONTEXT_PATH in entry_context
def test_multiple_outputs_keys(self):
"""
Given
- File has 3 unique keys. sha256, md5 and sha1
When
- creating CommandResults with outputs_key_field=[sha1, sha256, md5]
Then
- entrycontext DT expression contains all 3 unique fields
"""
from CommonServerPython import CommandResults
files = [
{
'sha256': '111',
'sha1': '111',
'md5': '111'
},
{
'sha256': '222',
'sha1': '222',
'md5': '222'
}
]
results = CommandResults(outputs_prefix='File', outputs_key_field=['sha1', 'sha256', 'md5'], outputs=files)
assert list(results.to_context()['EntryContext'].keys())[0] == \
'File(val.sha1 && val.sha1 == obj.sha1 && val.sha256 && val.sha256 == obj.sha256 && val.md5 && val.md5 == obj.md5)'
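# The asserted key is an XSOAR DT expression: listing several key fields means context
# entries are merged only when all of the listed fields match (a hedged reading based on
# this expected output; compare the single-field keys asserted elsewhere in this file).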
def test_output_prefix_includes_dt(self):
"""
Given
- Returning File with only outputs_prefix which includes DT in it
- outputs key fields are not provided
When
- creating CommandResults
Then
- EntryContext key should contain only the outputs_prefix
"""
from CommonServerPython import CommandResults
files = [{"key": "value"}] # if outputs is empty list, no results are returned
results = CommandResults(outputs_prefix='File(val.sha1 == obj.sha1 && val.md5 == obj.md5)',
outputs_key_field='', outputs=files)
assert list(results.to_context()['EntryContext'].keys())[0] == \
'File(val.sha1 == obj.sha1 && val.md5 == obj.md5)'
@pytest.mark.parametrize('score, expected_readable',
[(CommonServerPython.Common.DBotScore.NONE, 'Unknown'),
(CommonServerPython.Common.DBotScore.GOOD, 'Good'),
(CommonServerPython.Common.DBotScore.SUSPICIOUS, 'Suspicious'),
(CommonServerPython.Common.DBotScore.BAD, 'Bad')])
def test_dbot_readable(self, score, expected_readable):
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
indicator='8.8.8.8',
integration_name='Test',
indicator_type=DBotScoreType.IP,
score=score
)
assert dbot_score.to_readable() == expected_readable
def test_dbot_readable_invalid(self):
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
indicator='8.8.8.8',
integration_name='Test',
indicator_type=DBotScoreType.IP,
score=0
)
dbot_score.score = 7
assert dbot_score.to_readable() == 'Undefined'
dbot_score.score = None
assert dbot_score.to_readable() == 'Undefined'
def test_readable_only_context(self):
"""
Given:
- Markdown entry to CommandResults
When:
- Returning results
Then:
- Validate HumanReadable exists
"""
from CommonServerPython import CommandResults
markdown = '## Something'
context = CommandResults(readable_output=markdown).to_context()
assert context.get('HumanReadable') == markdown
def test_empty_outputs(self):
"""
Given:
- Outputs as None
When:
- Returning results
Then:
- Validate EntryContext key value
"""
from CommonServerPython import CommandResults
res = CommandResults(
outputs_prefix='FoundIndicators',
outputs_key_field='value',
outputs=None
)
context = res.to_context()
assert {} == context.get('EntryContext')
def test_empty_list_outputs(self):
"""
Given:
- Outputs with empty list
When:
- Returning results
Then:
- Validate EntryContext key value
"""
from CommonServerPython import CommandResults
res = CommandResults(
outputs_prefix='FoundIndicators',
outputs_key_field='value',
outputs=[]
)
context = res.to_context()
assert {} == context.get('EntryContext')
def test_return_command_results(self, clear_version_cache):
from CommonServerPython import Common, CommandResults, EntryFormat, EntryType, DBotScoreType
dbot_score = Common.DBotScore(
indicator='8.8.8.8',
integration_name='Test',
indicator_type=DBotScoreType.IP,
score=Common.DBotScore.GOOD
)
ip = Common.IP(
ip='8.8.8.8',
dbot_score=dbot_score,
asn='some asn',
hostname='test.com',
geo_country=None,
geo_description=None,
geo_latitude=None,
geo_longitude=None,
positive_engines=None,
detection_engines=None
)
results = CommandResults(
outputs_key_field=None,
outputs_prefix=None,
outputs=None,
indicators=[ip]
)
assert results.to_context() == {
'Type': EntryType.NOTE,
'ContentsFormat': EntryFormat.JSON,
'Contents': None,
'HumanReadable': None,
'EntryContext': {
'IP(val.Address && val.Address == obj.Address)': [
{
'Address': '8.8.8.8',
'ASN': 'some asn',
'Hostname': 'test.com'
}
],
'DBotScore(val.Indicator && val.Indicator == obj.Indicator && '
'val.Vendor == obj.Vendor && val.Type == obj.Type)': [
{
'Indicator': '8.8.8.8',
'Vendor': 'Test',
'Score': 1,
'Type': 'ip'
}
]
},
'IndicatorTimeline': [],
'Relationships': [],
'IgnoreAutoExtract': False,
'Note': False
}
def test_multiple_indicators(self, clear_version_cache):
from CommonServerPython import Common, CommandResults, EntryFormat, EntryType, DBotScoreType
dbot_score1 = Common.DBotScore(
indicator='8.8.8.8',
integration_name='Test',
indicator_type=DBotScoreType.IP,
score=Common.DBotScore.GOOD
)
ip1 = Common.IP(
ip='8.8.8.8',
dbot_score=dbot_score1,
asn='some asn',
hostname='test.com',
geo_country=None,
geo_description=None,
geo_latitude=None,
geo_longitude=None,
positive_engines=None,
detection_engines=None
)
dbot_score2 = Common.DBotScore(
indicator='5.5.5.5',
integration_name='Test',
indicator_type=DBotScoreType.IP,
score=Common.DBotScore.GOOD
)
ip2 = Common.IP(
ip='5.5.5.5',
dbot_score=dbot_score2,
asn='some asn',
hostname='test.com',
geo_country=None,
geo_description=None,
geo_latitude=None,
geo_longitude=None,
positive_engines=None,
detection_engines=None
)
results = CommandResults(
outputs_key_field=None,
outputs_prefix=None,
outputs=None,
indicators=[ip1, ip2]
)
assert results.to_context() == {
'Type': EntryType.NOTE,
'ContentsFormat': EntryFormat.JSON,
'Contents': None,
'HumanReadable': None,
'EntryContext': {
'IP(val.Address && val.Address == obj.Address)': [
{
'Address': '8.8.8.8',
'ASN': 'some asn',
'Hostname': 'test.com'
},
{
'Address': '5.5.5.5',
'ASN': 'some asn',
'Hostname': 'test.com'
}
],
'DBotScore(val.Indicator && val.Indicator == obj.Indicator && '
'val.Vendor == obj.Vendor && val.Type == obj.Type)': [
{
'Indicator': '8.8.8.8',
'Vendor': 'Test',
'Score': 1,
'Type': 'ip'
},
{
'Indicator': '5.5.5.5',
'Vendor': 'Test',
'Score': 1,
'Type': 'ip'
}
]
},
'IndicatorTimeline': [],
'Relationships': [],
'IgnoreAutoExtract': False,
'Note': False
}
def test_return_list_of_items(self, clear_version_cache):
from CommonServerPython import CommandResults, EntryFormat, EntryType
tickets = [
{
'ticket_id': 1,
'title': 'foo'
},
{
'ticket_id': 2,
'title': 'goo'
}
]
results = CommandResults(
outputs_prefix='Jira.Ticket',
outputs_key_field='ticket_id',
outputs=tickets
)
assert results.to_context() == {
'Type': EntryType.NOTE,
'ContentsFormat': EntryFormat.JSON,
'Contents': tickets,
'HumanReadable': tableToMarkdown('Results', tickets),
'EntryContext': {
'Jira.Ticket(val.ticket_id && val.ticket_id == obj.ticket_id)': tickets
},
'IndicatorTimeline': [],
'Relationships': [],
'IgnoreAutoExtract': False,
'Note': False
}
def test_return_list_of_items_the_old_way(self):
from CommonServerPython import CommandResults, EntryFormat, EntryType
tickets = [
{
'ticket_id': 1,
'title': 'foo'
},
{
'ticket_id': 2,
'title': 'goo'
}
]
results = CommandResults(
outputs_prefix=None,
outputs_key_field=None,
outputs={
'Jira.Ticket(val.ticket_id == obj.ticket_id)': tickets
},
raw_response=tickets
)
assert sorted(results.to_context()) == sorted({
'Type': EntryType.NOTE,
'ContentsFormat': EntryFormat.JSON,
'Contents': tickets,
'HumanReadable': None,
'EntryContext': {
'Jira.Ticket(val.ticket_id == obj.ticket_id)': tickets
},
'IndicatorTimeline': [],
'Relationships': [],
'IgnoreAutoExtract': False,
'Note': False
})
def test_create_dbot_score_with_invalid_score(self):
from CommonServerPython import Common, DBotScoreType
try:
Common.DBotScore(
indicator='8.8.8.8',
integration_name='Virus Total',
score=100,
indicator_type=DBotScoreType.IP
)
assert False
except TypeError:
assert True
def test_create_dbot_score_with_invalid_reliability(self):
"""
Given:
- an invalid reliability value.
When
- creating a DBotScore entry
Then
- an error should be raised
"""
from CommonServerPython import Common, DBotScoreType
try:
Common.DBotScore(
indicator='8.8.8.8',
integration_name='Virus Total',
score=0,
indicator_type=DBotScoreType.IP,
reliability='Not a reliability'
)
assert False
except TypeError:
assert True
def test_create_dbot_score_with_valid_reliability(self):
"""
Given:
- a valid reliability value
When
- creating a DBotScore entry
Then
- the proper entry is created
"""
from CommonServerPython import Common, DBotScoreType, DBotScoreReliability, CommandResults
dbot_score = Common.DBotScore(
indicator='8.8.8.8',
integration_name='Test',
score=Common.DBotScore.GOOD,
indicator_type=DBotScoreType.IP,
reliability=DBotScoreReliability.B,
)
ip = Common.IP(
ip='8.8.8.8',
dbot_score=dbot_score,
)
results = CommandResults(
indicator=ip,
)
assert results.to_context()['EntryContext'] == {
'IP(val.Address && val.Address == obj.Address)': [
{
'Address': '8.8.8.8'
}
],
'DBotScore(val.Indicator && val.Indicator == '
'obj.Indicator && val.Vendor == obj.Vendor && val.Type == obj.Type)': [
{
'Indicator': '8.8.8.8',
'Type': 'ip',
'Vendor': 'Test',
'Score': 1,
'Reliability': 'B - Usually reliable'
}
]
}
def test_indicator_timeline_with_list_of_indicators(self):
"""
Given:
- a list of an indicator
When
- creating an IndicatorTimeline object
- creating a CommandResults objects using the IndicatorTimeline object
Then
- the IndicatorTimeline receives the appropriate category and message
"""
from CommonServerPython import CommandResults, IndicatorsTimeline
indicators = ['8.8.8.8']
timeline = IndicatorsTimeline(indicators=indicators, category='test', message='message')
results = CommandResults(
outputs_prefix=None,
outputs_key_field=None,
outputs=None,
raw_response=indicators,
indicators_timeline=timeline
)
assert sorted(results.to_context().get('IndicatorTimeline')) == sorted([
{'Value': '8.8.8.8', 'Category': 'test', 'Message': 'message'}
])
def test_indicator_timeline_running_from_an_integration(self, mocker):
"""
Given:
- a list of an indicator
When
- mocking the demisto.params()
- creating an IndicatorTimeline object
- creating a CommandResults objects using the IndicatorTimeline object
Then
- the IndicatorTimeline receives the default 'Integration Update' category
"""
from CommonServerPython import CommandResults, IndicatorsTimeline
mocker.patch.object(demisto, 'params', return_value={'insecure': True})
indicators = ['8.8.8.8']
timeline = IndicatorsTimeline(indicators=indicators)
results = CommandResults(
outputs_prefix=None,
outputs_key_field=None,
outputs=None,
raw_response=indicators,
indicators_timeline=timeline
)
assert sorted(results.to_context().get('IndicatorTimeline')) == sorted([
{'Value': '8.8.8.8', 'Category': 'Integration Update'}
])
def test_single_indicator(self, mocker):
"""
Given:
- a single indicator
When
- mocking the demisto.params()
- creating an Common.IP object
- creating a CommandResults objects using the indicator member
Then
- The CommandResults.to_context() returns a single result with the standard IP and DBotScore outputs
"""
from CommonServerPython import CommandResults, Common, DBotScoreType
mocker.patch.object(demisto, 'params', return_value={'insecure': True})
dbot_score = Common.DBotScore(
indicator='8.8.8.8',
integration_name='Test',
indicator_type=DBotScoreType.IP,
score=Common.DBotScore.GOOD
)
ip = Common.IP(
ip='8.8.8.8',
dbot_score=dbot_score
)
results = CommandResults(
indicator=ip
)
assert results.to_context()['EntryContext'] == {
'IP(val.Address && val.Address == obj.Address)': [
{
'Address': '8.8.8.8'
}
],
'DBotScore(val.Indicator && val.Indicator == '
'obj.Indicator && val.Vendor == obj.Vendor && val.Type == obj.Type)': [
{
'Indicator': '8.8.8.8',
'Type': 'ip',
'Vendor': 'Test',
'Score': 1
}
]
}
def test_single_indicator_with_indicators(self, mocker):
"""
Given:
- a single indicator and a list of indicators
When
- mocking the demisto.params()
- creating an Common.IP object
- creating a CommandResults objects using the indicator member AND indicators member
Then
- The CommandResults.__init__() should raise a ValueError with an appropriate error message
"""
from CommonServerPython import CommandResults, Common, DBotScoreType
mocker.patch.object(demisto, 'params', return_value={'insecure': True})
dbot_score = Common.DBotScore(
indicator='8.8.8.8',
integration_name='Virus Total',
indicator_type=DBotScoreType.IP,
score=Common.DBotScore.GOOD
)
ip = Common.IP(
ip='8.8.8.8',
dbot_score=dbot_score
)
with pytest.raises(ValueError) as e:
CommandResults(
indicator=ip,
indicators=[ip]
)
assert e.value.args[0] == 'indicators is DEPRECATED, use only indicator'
def test_indicator_with_no_auto_extract(self):
"""
Given:
- a list of an indicator
- ignore_auto_extract set to True
When
- creating a CommandResults object with an indicator
- using Ignore Auto Extract
Then
- the IgnoreAutoExtract field is set to True
"""
from CommonServerPython import CommandResults
indicators = ['8.8.8.8']
results = CommandResults(
outputs_prefix=None,
outputs_key_field=None,
outputs=None,
raw_response=indicators,
indicators_timeline=None,
ignore_auto_extract=True
)
assert results.to_context().get('IgnoreAutoExtract') is True
def test_entry_as_note(self):
"""
Given:
- mark_as_note set to True
When:
- creating a CommandResults object
Then:
- the Note field is set to True
"""
from CommonServerPython import CommandResults
results = CommandResults(
outputs_prefix='Test',
outputs_key_field='value',
outputs=None,
mark_as_note=True
)
assert results.to_context().get('Note') is True
def test_http_request_ssl_ciphers_insecure():
if IS_PY3 and PY_VER_MINOR >= 10:
from CommonServerPython import BaseClient
client = BaseClient('https://www.google.com', ok_codes=(200, 201), verify=False)
adapter = client._session.adapters.get('https://')
ssl_context = adapter.poolmanager.connection_pool_kw['ssl_context']
ciphers_list = ssl_context.get_ciphers()
assert len(ciphers_list) == 42
assert next(cipher for cipher in ciphers_list if cipher['name'] == 'AES128-GCM-SHA256')
else:
assert True
class TestBaseClient:
from CommonServerPython import BaseClient
text = {"status": "ok"}
client = BaseClient('http://example.com/api/v2/', ok_codes=(200, 201))
RETRIES_POSITIVE_TEST = [
'get',
'put',
'post'
]
@pytest.mark.skip(reason="Test - too long, only manual")
@pytest.mark.parametrize('method', RETRIES_POSITIVE_TEST)
def test_http_requests_with_retry_sanity(self, method):
"""
Given
- A base client
When
- Making http request call with retries configured to a number higher than 0
Then
- Ensure a successful request returns the response as expected
"""
url = 'http://httpbin.org/{}'.format(method)
res = self.client._http_request(method,
'',
full_url=url,
retries=1,
status_list_to_retry=[401])
assert res['url'] == url
RETRIES_NEGATIVE_TESTS_INPUT = [
('get', 400), ('get', 401), ('get', 500),
('put', 400), ('put', 401), ('put', 500),
('post', 400), ('post', 401), ('post', 500),
]
@pytest.mark.skip(reason="Test - too long, only manual")
@pytest.mark.parametrize('method, status', RETRIES_NEGATIVE_TESTS_INPUT)
def test_http_requests_with_retry_negative_sanity(self, method, status):
"""
Given
- A base client
When
- Making http request call with retries configured to a number higher than 0
Then
- An unsuccessful request raises a DemistoException regardless of the specific bad status code.
"""
from CommonServerPython import DemistoException
with raises(DemistoException, match='{}'.format(status)):
self.client._http_request(method,
'',
full_url='http://httpbin.org/status/{}'.format(status),
retries=3,
status_list_to_retry=[400, 401, 500])
def test_http_request_json(self, requests_mock):
requests_mock.get('http://example.com/api/v2/event', text=json.dumps(self.text))
res = self.client._http_request('get', 'event')
assert res == self.text
def test_http_request_json_negative(self, requests_mock):
from CommonServerPython import DemistoException
text = 'notjson'
requests_mock.get('http://example.com/api/v2/event', text=text)
with raises(DemistoException, match="Failed to parse json") as exception:
self.client._http_request('get', 'event')
assert exception.value.res
assert exception.value.res.text == text
def test_http_request_text(self, requests_mock):
requests_mock.get('http://example.com/api/v2/event', text=json.dumps(self.text))
res = self.client._http_request('get', 'event', resp_type='text')
assert res == json.dumps(self.text)
def test_http_request_content(self, requests_mock):
requests_mock.get('http://example.com/api/v2/event', content=str.encode(json.dumps(self.text)))
res = self.client._http_request('get', 'event', resp_type='content')
assert json.loads(res) == self.text
def test_http_request_response(self, requests_mock):
requests_mock.get('http://example.com/api/v2/event')
res = self.client._http_request('get', 'event', resp_type='response')
assert isinstance(res, requests.Response)
def test_http_request_proxy_false(self):
from CommonServerPython import BaseClient
import requests_mock
os.environ['http_proxy'] = 'http://testproxy:8899'
os.environ['https_proxy'] = 'https://testproxy:8899'
os.environ['REQUESTS_CA_BUNDLE'] = '/test1.pem'
client = BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), proxy=False, verify=True)
with requests_mock.mock() as m:
m.get('http://example.com/api/v2/event')
res = client._http_request('get', 'event', resp_type='response')
assert m.last_request.verify == '/test1.pem'
assert not m.last_request.proxies
assert m.called is True
def test_http_request_proxy_true(self):
from CommonServerPython import BaseClient
import requests_mock
os.environ['http_proxy'] = 'http://testproxy:8899'
os.environ['https_proxy'] = 'https://testproxy:8899'
os.environ['REQUESTS_CA_BUNDLE'] = '/test1.pem'
client = BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), proxy=True, verify=True)
with requests_mock.mock() as m:
m.get('http://example.com/api/v2/event')
res = client._http_request('get', 'event', resp_type='response')
assert m.last_request.verify == '/test1.pem'
assert m.last_request.proxies == {
'http': 'http://testproxy:8899',
'https': 'https://testproxy:8899'
}
assert m.called is True
def test_http_request_proxy_without_http_prefix(self):
"""
Given
- proxy param is set to true
- proxy configs are without http/https prefix
When
- run an http get request
Then
- the request runs and uses proxy configs that include the http:// prefix.
"""
from CommonServerPython import BaseClient
import requests_mock
os.environ['http_proxy'] = 'testproxy:8899'
os.environ['https_proxy'] = 'testproxy:8899'
os.environ['REQUESTS_CA_BUNDLE'] = '/test1.pem'
client = BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), proxy=True, verify=True)
with requests_mock.mock() as m:
m.get('http://example.com/api/v2/event')
res = client._http_request('get', 'event', resp_type='response')
assert m.last_request.verify == '/test1.pem'
assert m.last_request.proxies == {
'http': 'http://testproxy:8899',
'https': 'http://testproxy:8899'
}
assert m.called is True
def test_http_request_proxy_empty_proxy(self):
"""
Given
- proxy param is set to true
- proxy configs are empty
When
- run an http get request
Then
- the request runs with empty proxy configs and no http/https prefixes are added
"""
from CommonServerPython import BaseClient
import requests_mock
os.environ['http_proxy'] = ''
os.environ['https_proxy'] = ''
os.environ['REQUESTS_CA_BUNDLE'] = '/test1.pem'
client = BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), proxy=True, verify=True)
with requests_mock.mock() as m:
m.get('http://example.com/api/v2/event')
res = client._http_request('get', 'event', resp_type='response')
assert m.last_request.verify == '/test1.pem'
assert m.last_request.proxies == {}
assert m.called is True
def test_http_request_verify_false(self):
from CommonServerPython import BaseClient
import requests_mock
os.environ['REQUESTS_CA_BUNDLE'] = '/test1.pem'
client = BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), proxy=True, verify=False)
with requests_mock.mock() as m:
m.get('http://example.com/api/v2/event')
res = client._http_request('get', 'event', resp_type='response')
assert m.last_request.verify is False
assert m.called is True
def test_http_request_not_ok(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', status_code=500)
with raises(DemistoException, match="[500]"):
self.client._http_request('get', 'event')
def test_http_request_not_ok_but_ok(self, requests_mock):
requests_mock.get('http://example.com/api/v2/event', status_code=500)
res = self.client._http_request('get', 'event', resp_type='response', ok_codes=(500,))
assert res.status_code == 500
def test_http_request_not_ok_with_json(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', status_code=500, content=str.encode(json.dumps(self.text)))
with raises(DemistoException, match="Error in API call"):
self.client._http_request('get', 'event')
def test_http_request_not_ok_with_json_parsing(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', status_code=500, content=str.encode(json.dumps(self.text)))
with raises(DemistoException) as exception:
self.client._http_request('get', 'event')
message = str(exception.value)
response_json_error = json.loads(message.split('\n')[1])
assert response_json_error == self.text
def test_http_request_timeout(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.ConnectTimeout)
with raises(DemistoException, match="Connection Timeout Error"):
self.client._http_request('get', 'event')
def test_http_request_ssl_error(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.SSLError)
with raises(DemistoException, match="SSL Certificate Verification Failed"):
self.client._http_request('get', 'event', resp_type='response')
def test_http_request_ssl_error_insecure(cls, requests_mock):
requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.SSLError('test ssl'))
client = cls.BaseClient('http://example.com/api/v2/', ok_codes=(200, 201), verify=False)
with raises(requests.exceptions.SSLError, match="^test ssl$"):
client._http_request('get', 'event', resp_type='response')
def test_http_request_proxy_error(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.ProxyError)
with raises(DemistoException, match="Proxy Error"):
self.client._http_request('get', 'event', resp_type='response')
def test_http_request_connection_error(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event', exc=requests.exceptions.ConnectionError)
with raises(DemistoException, match="Verify that the server URL parameter"):
self.client._http_request('get', 'event', resp_type='response')
def test_text_exception_parsing(self, requests_mock):
from CommonServerPython import DemistoException
reason = 'Bad Request'
text = 'additional text'
requests_mock.get('http://example.com/api/v2/event',
status_code=400,
reason=reason,
text=text)
with raises(DemistoException, match='- {}\n{}'.format(reason, text)):
self.client._http_request('get', 'event', resp_type='text')
def test_json_exception_parsing(self, requests_mock):
from CommonServerPython import DemistoException
reason = 'Bad Request'
json_response = {'error': 'additional text'}
requests_mock.get('http://example.com/api/v2/event',
status_code=400,
reason=reason,
json=json_response)
with raises(DemistoException, match='- {}\n.*{}'.format(reason, json_response["error"])):
self.client._http_request('get', 'event', resp_type='text')
def test_exception_response_json_parsing_when_ok_code_is_invalid(self, requests_mock):
from CommonServerPython import DemistoException
json_response = {'error': 'additional text'}
requests_mock.get('http://example.com/api/v2/event',
status_code=400,
json=json_response)
try:
self.client._http_request('get', 'event', ok_codes=(200,))
except DemistoException as e:
resp_json = e.res.json()
assert e.res.status_code == 400
assert resp_json.get('error') == 'additional text'
def test_exception_response_text_parsing_when_ok_code_is_invalid(self, requests_mock):
from CommonServerPython import DemistoException
requests_mock.get('http://example.com/api/v2/event',
status_code=400,
text='{"error": "additional text"}')
try:
self.client._http_request('get', 'event', ok_codes=(200,))
except DemistoException as e:
resp_json = json.loads(e.res.text)
assert e.res.status_code == 400
assert resp_json.get('error') == 'additional text'
def test_http_request_timeout_default(self, requests_mock):
requests_mock.get('http://example.com/api/v2/event', text=json.dumps(self.text))
self.client._http_request('get', 'event')
assert requests_mock.last_request.timeout == self.client.REQUESTS_TIMEOUT
def test_http_request_timeout_given_func(self, requests_mock):
requests_mock.get('http://example.com/api/v2/event', text=json.dumps(self.text))
timeout = 120
self.client._http_request('get', 'event', timeout=timeout)
assert requests_mock.last_request.timeout == timeout
def test_http_request_timeout_given_class(self, requests_mock):
from CommonServerPython import BaseClient
requests_mock.get('http://example.com/api/v2/event', text=json.dumps(self.text))
timeout = 44
new_client = BaseClient('http://example.com/api/v2/', timeout=timeout)
new_client._http_request('get', 'event')
assert requests_mock.last_request.timeout == timeout
def test_http_request_timeout_environ_system(self, requests_mock, mocker):
from CommonServerPython import BaseClient
requests_mock.get('http://example.com/api/v2/event', text=json.dumps(self.text))
timeout = 10
mocker.patch.dict(os.environ, {'REQUESTS_TIMEOUT': str(timeout)})
new_client = BaseClient('http://example.com/api/v2/')
new_client._http_request('get', 'event')
assert requests_mock.last_request.timeout == timeout
def test_http_request_timeout_environ_integration(self, requests_mock, mocker):
requests_mock.get('http://example.com/api/v2/event', text=json.dumps(self.text))
timeout = 180.1
# integration name is set to Test in the fixture handle_calling_context
mocker.patch.dict(os.environ, {'REQUESTS_TIMEOUT.Test': str(timeout)})
from CommonServerPython import BaseClient
new_client = BaseClient('http://example.com/api/v2/')
new_client._http_request('get', 'event')
assert requests_mock.last_request.timeout == timeout
def test_http_request_timeout_environ_script(self, requests_mock, mocker):
requests_mock.get('http://example.com/api/v2/event', text=json.dumps(self.text))
timeout = 23.4
script_name = 'TestScript'
mocker.patch.dict(os.environ, {'REQUESTS_TIMEOUT.' + script_name: str(timeout)})
mocker.patch.dict(demisto.callingContext, {'context': {'ScriptName': script_name}})
mocker.patch.object(CommonServerPython, 'get_integration_name', return_value='')
from CommonServerPython import BaseClient
new_client = BaseClient('http://example.com/api/v2/')
new_client._http_request('get', 'event')
assert requests_mock.last_request.timeout == timeout
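# The timeout tests above cover the different ways a request timeout can be supplied
# (each test sets exactly one of them): a per-call argument, a constructor argument, a
# per-integration/script 'REQUESTS_TIMEOUT.<Name>' environment variable, the global
# 'REQUESTS_TIMEOUT' variable, and the BaseClient class default.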
def test_is_valid_ok_codes_empty(self):
from requests import Response
from CommonServerPython import BaseClient
new_client = BaseClient('http://example.com/api/v2/')
response = Response()
response.status_code = 200
assert new_client._is_status_code_valid(response, None)
def test_is_valid_ok_codes_from_function(self):
from requests import Response
response = Response()
response.status_code = 200
assert self.client._is_status_code_valid(response, (200, 201))
def test_is_valid_ok_codes_from_self(self):
from requests import Response
response = Response()
response.status_code = 200
assert self.client._is_status_code_valid(response, None)
def test_is_valid_ok_codes_empty_false(self):
from requests import Response
response = Response()
response.status_code = 400
assert not self.client._is_status_code_valid(response, None)
def test_is_valid_ok_codes_from_function_false(self):
from requests import Response
response = Response()
response.status_code = 400
assert not self.client._is_status_code_valid(response, (200, 201))
def test_is_valid_ok_codes_from_self_false(self):
from requests import Response
response = Response()
response.status_code = 400
assert not self.client._is_status_code_valid(response)
def test_parse_date_string():
# test unconverted data remains: Z
assert parse_date_string('2019-09-17T06:16:39Z') == datetime(2019, 9, 17, 6, 16, 39)
# test unconverted data remains: .22Z
assert parse_date_string('2019-09-17T06:16:39.22Z') == datetime(2019, 9, 17, 6, 16, 39, 220000)
# test time data without ms does not match format with ms
assert parse_date_string('2019-09-17T06:16:39Z', '%Y-%m-%dT%H:%M:%S.%f') == datetime(2019, 9, 17, 6, 16, 39)
# test time data with timezone Z does not match format with timezone +05:00
assert parse_date_string('2019-09-17T06:16:39Z', '%Y-%m-%dT%H:%M:%S+05:00') == datetime(2019, 9, 17, 6, 16, 39)
# test time data with timezone +05:00 does not match format with timezone Z
assert parse_date_string('2019-09-17T06:16:39+05:00', '%Y-%m-%dT%H:%M:%SZ') == datetime(2019, 9, 17, 6, 16, 39)
# test time data with timezone -05:00 and with ms does not match format with timezone +02:00 without ms
assert parse_date_string(
'2019-09-17T06:16:39.4040+05:00', '%Y-%m-%dT%H:%M:%S+02:00'
) == datetime(2019, 9, 17, 6, 16, 39, 404000)
def test_override_print(mocker):
mocker.patch.object(demisto, 'info')
int_logger = IntegrationLogger()
int_logger.set_buffering(False)
int_logger.print_override("test", "this")
assert demisto.info.call_count == 1
assert demisto.info.call_args[0][0] == "test this"
demisto.info.reset_mock()
int_logger.print_override("test", "this", file=sys.stderr)
assert demisto.info.call_count == 1
assert demisto.info.call_args[0][0] == "test this"
buf = StringIO()
# test writing to custom file (not stdout/stderr)
int_logger.print_override("test", "this", file=buf)
assert buf.getvalue() == 'test this\n'
def test_http_client_debug(mocker):
if not IS_PY3:
pytest.skip("test not supported in py2")
return
mocker.patch.object(demisto, 'info')
debug_log = DebugLogger()
from http.client import HTTPConnection
HTTPConnection.debuglevel = 1
    # not using 'with' on the connection because HTTPConnection is not a context manager in all supported python versions
con = HTTPConnection("google.com")
con.request('GET', '/')
with con.getresponse() as r:
r.read()
con.close()
assert demisto.info.call_count > 5
assert debug_log is not None
def test_http_client_debug_int_logger_sensitive_query_params(mocker):
if not IS_PY3:
pytest.skip("test not supported in py2")
return
mocker.patch.object(demisto, 'params', return_value={'APIKey': 'dummy'})
mocker.patch.object(demisto, 'info')
debug_log = DebugLogger()
from http.client import HTTPConnection
HTTPConnection.debuglevel = 1
con = HTTPConnection("google.com")
con.request('GET', '?apikey=dummy')
    # not using 'with' on the connection because HTTPConnection is not a context manager in all supported python versions
with con.getresponse() as r:
r.read()
con.close()
assert debug_log
for arg in demisto.info.call_args_list:
assert 'dummy' not in arg[0][0]
if 'apikey' in arg[0][0]:
assert 'apikey=<XX_REPLACED>' in arg[0][0]
class TestParseDateRange:
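    """Tests for parse_date_range: UTC/local sanity checks, input normalization (trailing spaces,
    case) and the error entries produced for malformed inputs."""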
@staticmethod
def test_utc_time_sanity():
utc_now = datetime.utcnow()
utc_start_time, utc_end_time = parse_date_range('2 days', utc=True)
# testing UTC date time and range of 2 days
assert utc_now.replace(microsecond=0) == utc_end_time.replace(microsecond=0)
assert abs(utc_start_time - utc_end_time).days == 2
@staticmethod
def test_local_time_sanity():
local_now = datetime.now()
local_start_time, local_end_time = parse_date_range('73 minutes', utc=False)
# testing local datetime and range of 73 minutes
assert local_now.replace(microsecond=0) == local_end_time.replace(microsecond=0)
assert abs(local_start_time - local_end_time).seconds / 60 == 73
@staticmethod
def test_with_trailing_spaces():
utc_now = datetime.utcnow()
utc_start_time, utc_end_time = parse_date_range('2 days ', utc=True)
# testing UTC date time and range of 2 days
assert utc_now.replace(microsecond=0) == utc_end_time.replace(microsecond=0)
assert abs(utc_start_time - utc_end_time).days == 2
@staticmethod
def test_case_insensitive():
utc_now = datetime.utcnow()
utc_start_time, utc_end_time = parse_date_range('2 Days', utc=True)
# testing UTC date time and range of 2 days
assert utc_now.replace(microsecond=0) == utc_end_time.replace(microsecond=0)
assert abs(utc_start_time - utc_end_time).days == 2
@staticmethod
def test_error__invalid_input_format(mocker):
mocker.patch.object(sys, 'exit', side_effect=Exception('mock exit'))
demisto_results = mocker.spy(demisto, 'results')
try:
parse_date_range('2 Days ago', utc=True)
except Exception as exp:
assert str(exp) == 'mock exit'
        results = demisto_results.call_args[0][0]
assert 'date_range must be "number date_range_unit"' in results['Contents']
@staticmethod
def test_error__invalid_time_value_not_a_number(mocker):
mocker.patch.object(sys, 'exit', side_effect=Exception('mock exit'))
demisto_results = mocker.spy(demisto, 'results')
try:
parse_date_range('ten Days', utc=True)
except Exception as exp:
assert str(exp) == 'mock exit'
        results = demisto_results.call_args[0][0]
assert 'The time value is invalid' in results['Contents']
@staticmethod
def test_error__invalid_time_value_not_an_integer(mocker):
mocker.patch.object(sys, 'exit', side_effect=Exception('mock exit'))
demisto_results = mocker.spy(demisto, 'results')
try:
parse_date_range('1.5 Days', utc=True)
except Exception as exp:
assert str(exp) == 'mock exit'
        results = demisto_results.call_args[0][0]
assert 'The time value is invalid' in results['Contents']
@staticmethod
def test_error__invalid_time_unit(mocker):
mocker.patch.object(sys, 'exit', side_effect=Exception('mock exit'))
demisto_results = mocker.spy(demisto, 'results')
try:
parse_date_range('2 nights', utc=True)
except Exception as exp:
assert str(exp) == 'mock exit'
        results = demisto_results.call_args[0][0]
assert 'The unit of date_range is invalid' in results['Contents']
def test_encode_string_results():
s = "test"
assert s == encode_string_results(s)
s2 = u"בדיקה"
if IS_PY3:
res = str(s2)
else:
res = s2.encode("utf8")
assert encode_string_results(s2) == res
not_string = [1, 2, 3]
assert not_string == encode_string_results(not_string)
class TestReturnOutputs:
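    """Tests for return_outputs covering the markdown-only, raw-response, timeline and
    ignore_auto_extract variants of the entry it builds."""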
def test_return_outputs(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
outputs = {'Event': 1}
raw_response = {'event': 1}
return_outputs(md, outputs, raw_response)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert raw_response == results['Contents']
assert 'json' == results['ContentsFormat']
assert outputs == results['EntryContext']
assert md == results['HumanReadable']
def test_return_outputs_only_md(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
return_outputs(md)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert md == results['HumanReadable']
assert 'text' == results['ContentsFormat']
def test_return_outputs_raw_none(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
outputs = {'Event': 1}
return_outputs(md, outputs, None)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert outputs == results['Contents']
assert 'json' == results['ContentsFormat']
assert outputs == results['EntryContext']
assert md == results['HumanReadable']
def test_return_outputs_timeline(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
outputs = {'Event': 1}
raw_response = {'event': 1}
timeline = [{'Value': 'blah', 'Message': 'test', 'Category': 'test'}]
return_outputs(md, outputs, raw_response, timeline)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert raw_response == results['Contents']
assert 'json' == results['ContentsFormat']
assert outputs == results['EntryContext']
assert md == results['HumanReadable']
assert timeline == results['IndicatorTimeline']
def test_return_outputs_timeline_without_category(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
outputs = {'Event': 1}
raw_response = {'event': 1}
timeline = [{'Value': 'blah', 'Message': 'test'}]
return_outputs(md, outputs, raw_response, timeline)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert raw_response == results['Contents']
assert 'json' == results['ContentsFormat']
assert outputs == results['EntryContext']
assert md == results['HumanReadable']
assert 'Category' in results['IndicatorTimeline'][0].keys()
assert results['IndicatorTimeline'][0]['Category'] == 'Integration Update'
def test_return_outputs_ignore_auto_extract(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
outputs = {'Event': 1}
raw_response = {'event': 1}
ignore_auto_extract = True
return_outputs(md, outputs, raw_response, ignore_auto_extract=ignore_auto_extract)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert raw_response == results['Contents']
assert 'json' == results['ContentsFormat']
assert outputs == results['EntryContext']
assert md == results['HumanReadable']
assert ignore_auto_extract == results['IgnoreAutoExtract']
def test_return_outputs_text_raw_response(self, mocker):
mocker.patch.object(demisto, 'results')
md = 'md'
raw_response = 'string'
return_outputs(md, raw_response=raw_response)
results = demisto.results.call_args[0][0]
assert len(demisto.results.call_args[0]) == 1
assert demisto.results.call_count == 1
assert raw_response == results['Contents']
assert 'text' == results['ContentsFormat']
def test_argToBoolean():
assert argToBoolean('true') is True
assert argToBoolean('yes') is True
assert argToBoolean('TrUe') is True
assert argToBoolean(True) is True
assert argToBoolean('false') is False
assert argToBoolean('no') is False
assert argToBoolean(False) is False
batch_params = [
# full batch case
([1, 2, 3], 1, [[1], [2], [3]]),
# empty case
([], 1, []),
    # batch size larger than the iterable
    ([1, 2, 3], 5, [[1, 2, 3]]),
    # uneven split: the last batch is smaller than the batch size
    ([1, 2, 3, 4, 5], 2, [[1, 2], [3, 4], [5]]),
([1] * 100, 2, [[1, 1]] * 50)
]
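# Each tuple above is (iterable, batch size, expected chunks); for example batch([1, 2, 3, 4, 5], 2)
# is expected to yield [1, 2], [3, 4] and then the shorter tail [5].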
@pytest.mark.parametrize('iterable, sz, expected', batch_params)
def test_batch(iterable, sz, expected):
for i, item in enumerate(batch(iterable, sz)):
assert expected[i] == item
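# ipv4Regex, ipv4cidrRegex, ipv6Regex and ipv6cidrRegex are module-level patterns from
# CommonServerPython; each tuple below is (pattern, candidate string, whether a full match is expected).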
regexes_test = [
(ipv4Regex, '192.168.1.1', True),
(ipv4Regex, '192.168.1.1/24', False),
(ipv4Regex, '192.168.a.1', False),
(ipv4Regex, '192.168..1.1', False),
(ipv4Regex, '192.256.1.1', False),
(ipv4Regex, '192.256.1.1.1', False),
(ipv4cidrRegex, '192.168.1.1/32', True),
(ipv4cidrRegex, '192.168.1.1.1/30', False),
(ipv4cidrRegex, '192.168.1.b/30', False),
(ipv4cidrRegex, '192.168.1.12/381', False),
(ipv6Regex, '2001:db8:a0b:12f0::1', True),
(ipv6Regex, '2001:db8:a0b:12f0::1/11', False),
(ipv6Regex, '2001:db8:a0b:12f0::1::1', False),
(ipv6Regex, '2001:db8:a0b:12f0::98aa5', False),
(ipv6cidrRegex, '2001:db8:a0b:12f0::1/64', True),
(ipv6cidrRegex, '2001:db8:a0b:12f0::1/256', False),
(ipv6cidrRegex, '2001:db8:a0b:12f0::1::1/25', False),
(ipv6cidrRegex, '2001:db8:a0b:12f0::1aaasds::1/1', False)
]
@pytest.mark.parametrize('pattern, string, expected', regexes_test)
def test_regexes(pattern, string, expected):
# (str, str, bool) -> None
# emulates re.fullmatch from py3.4
assert expected is bool(re.match("(?:" + pattern + r")\Z", string))
IP_TO_INDICATOR_TYPE_PACK = [
('192.168.1.1', FeedIndicatorType.IP),
('192.168.1.1/32', FeedIndicatorType.CIDR),
('2001:db8:a0b:12f0::1', FeedIndicatorType.IPv6),
('2001:db8:a0b:12f0::1/64', FeedIndicatorType.IPv6CIDR),
]
@pytest.mark.parametrize('ip, indicator_type', IP_TO_INDICATOR_TYPE_PACK)
def test_ip_to_indicator(ip, indicator_type):
assert FeedIndicatorType.ip_to_indicator_type(ip) is indicator_type
data_test_b64_encode = [
(u'test', 'dGVzdA=='),
('test', 'dGVzdA=='),
(b'test', 'dGVzdA=='),
('', ''),
('%', 'JQ=='),
(u'§', 'wqc='),
(u'§t`e§s`t§', 'wqd0YGXCp3NgdMKn'),
]
@pytest.mark.parametrize('_input, expected_output', data_test_b64_encode)
def test_b64_encode(_input, expected_output):
output = b64_encode(_input)
assert output == expected_output, 'b64_encode({}) returns: {} instead: {}'.format(_input, output, expected_output)
def test_traceback_in_return_error_debug_mode_on(mocker):
mocker.patch.object(demisto, 'command', return_value="test-command")
mocker.spy(demisto, 'results')
mocker.patch('CommonServerPython.is_debug_mode', return_value=True)
from CommonServerPython import return_error
try:
raise Exception("This is a test string")
except Exception:
with pytest.raises(SystemExit):
return_error("some text")
assert "This is a test string" in str(demisto.results.call_args)
assert "Traceback" in str(demisto.results.call_args)
assert "some text" in str(demisto.results.call_args)
def test_traceback_in_return_error_debug_mode_off(mocker):
mocker.patch.object(demisto, 'command', return_value="test-command")
mocker.spy(demisto, 'results')
mocker.patch('CommonServerPython.is_debug_mode', return_value=False)
from CommonServerPython import return_error
try:
raise Exception("This is a test string")
except Exception:
with pytest.raises(SystemExit):
return_error("some text")
assert "This is a test string" not in str(demisto.results.call_args)
assert "Traceback" not in str(demisto.results.call_args)
assert "some text" in str(demisto.results.call_args)
# append_context unit test
CONTEXT_MOCK = {
'str_key': 'str_value',
'dict_key': {
'key1': 'val1',
'key2': 'val2'
},
'int_key': 1,
'list_key_str': ['val1', 'val2'],
'list_key_list': ['val1', 'val2'],
'list_key_dict': ['val1', 'val2']
}
UPDATED_CONTEXT = {
'str_key': 'str_data,str_value',
'dict_key': {
'key1': 'val1',
'key2': 'val2',
'data_key': 'data_val'
},
'int_key': [1, 2],
'list_key_str': ['val1', 'val2', 'str_data'],
'list_key_list': ['val1', 'val2', 'val1', 'val2'],
'list_key_dict': ['val1', 'val2', {'data_key': 'data_val'}]
}
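# UPDATED_CONTEXT is what CONTEXT_MOCK should look like after appendContext merges the matching
# DATA_MOCK_* value into each key (strings comma-joined, dicts updated, lists extended, scalars listed).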
DATA_MOCK_STRING = "str_data"
DATA_MOCK_LIST = ['val1', 'val2']
DATA_MOCK_DICT = {
'data_key': 'data_val'
}
DATA_MOCK_INT = 2
STR_KEY = "str_key"
DICT_KEY = "dict_key"
APPEND_CONTEXT_INPUT = [
(CONTEXT_MOCK, DATA_MOCK_STRING, STR_KEY, "key = {}, val = {}".format(STR_KEY, UPDATED_CONTEXT[STR_KEY])),
(CONTEXT_MOCK, DATA_MOCK_LIST, STR_KEY, "TypeError"),
(CONTEXT_MOCK, DATA_MOCK_DICT, STR_KEY, "TypeError"),
(CONTEXT_MOCK, DATA_MOCK_STRING, DICT_KEY, "TypeError"),
(CONTEXT_MOCK, DATA_MOCK_LIST, DICT_KEY, "TypeError"),
(CONTEXT_MOCK, DATA_MOCK_DICT, DICT_KEY, "key = {}, val = {}".format(DICT_KEY, UPDATED_CONTEXT[DICT_KEY])),
(CONTEXT_MOCK, DATA_MOCK_STRING, 'list_key_str',
"key = {}, val = {}".format('list_key_str', UPDATED_CONTEXT['list_key_str'])),
(CONTEXT_MOCK, DATA_MOCK_LIST, 'list_key_list',
"key = {}, val = {}".format('list_key_list', UPDATED_CONTEXT['list_key_list'])),
(CONTEXT_MOCK, DATA_MOCK_DICT, 'list_key_dict',
"key = {}, val = {}".format('list_key_dict', UPDATED_CONTEXT['list_key_dict'])),
(CONTEXT_MOCK, DATA_MOCK_INT, 'int_key', "key = {}, val = {}".format('int_key', UPDATED_CONTEXT['int_key'])),
]
def get_set_context(key, val):
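    """demisto.setContext side effect used by test_append_context: funnels the key/value pair into
    return_error so the test can read it back from demisto.results."""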
from CommonServerPython import return_error
return_error("key = {}, val = {}".format(key, val))
@pytest.mark.parametrize('context_mock, data_mock, key, expected_answer', APPEND_CONTEXT_INPUT)
def test_append_context(mocker, context_mock, data_mock, key, expected_answer):
from CommonServerPython import demisto
mocker.patch.object(demisto, 'get', return_value=context_mock.get(key))
mocker.patch.object(demisto, 'setContext', side_effect=get_set_context)
mocker.patch.object(demisto, 'results')
if "TypeError" not in expected_answer:
with raises(SystemExit, match='0'):
appendContext(key, data_mock)
assert expected_answer in demisto.results.call_args[0][0]['Contents']
else:
with raises(TypeError) as e:
appendContext(key, data_mock)
        assert expected_answer == e.typename
INDICATOR_VALUE_AND_TYPE = [
('3fec1b14cea32bbcd97fad4507b06888', "File"),
('1c8893f75089a27ca6a8d49801d7aa6b64ea0c6167fe8b1becfe9bc13f47bdc1', 'File'),
('castaneda-thornton.com', 'Domain'),
('192.0.0.1', 'IP'),
('test@gmail.com', 'Email'),
('e775eb1250137c0b83d4e7c4549c71d6f10cae4e708ebf0b5c4613cbd1e91087', 'File'),
('test@yahoo.com', 'Email'),
('http://test.com', 'URL'),
('11.111.11.11/11', 'CIDR'),
('CVE-0000-0000', 'CVE'),
('dbot@demisto.works', 'Email'),
('37b6d02m-63e0-495e-kk92-7c21511adc7a@SB2APC01FT091.outlook.com', 'Email'),
('dummy@recipient.com', 'Email'),
('image003.gif@01CF4D7F.1DF62650', 'Email'),
('bruce.wayne@pharmtech.zz', 'Email'),
('joe@gmail.com', 'Email'),
('koko@demisto.com', 'Email'),
('42a5e275559a1651b3df8e15d3f5912499f0f2d3d1523959c56fc5aea6371e59', 'File'),
('10676cf66244cfa91567fbc1a937f4cb19438338b35b69d4bcc2cf0d3a44af5e', 'File'),
('52483514f07eb14570142f6927b77deb7b4da99f', 'File'),
('c8092abd8d581750c0530fa1fc8d8318', 'File'),
('fe80:0000:0000:0000:91ba:7558:26d3:acde', 'IPv6'),
('fd60:e22:f1b9::2', 'IPv6'),
('2001:db8:0000:0000:0000:0000:0000:0000', 'IPv6'),
('112.126.94.107', 'IP'),
('a', None),
('*castaneda-thornton.com', 'DomainGlob'),
(
'53e6baa124f54462786f1122e98e38ff1be3de82fe2a96b1849a8637043fd847eec7e0f53307bddf7a066565292d500c36c941f1f3bb9dcac807b2f4a0bfce1b',
'File')
]
@pytest.mark.parametrize('indicator_value, indicator_type', INDICATOR_VALUE_AND_TYPE)
def test_auto_detect_indicator_type(indicator_value, indicator_type):
"""
Given
- Indicator value
- Indicator type
When
- Trying to detect the type of an indicator.
Then
- Run the auto_detect_indicator_type and validate that the indicator type the function returns is as expected.
"""
if sys.version_info.major == 3 and sys.version_info.minor == 8:
        assert auto_detect_indicator_type(indicator_value) == indicator_type
else:
try:
auto_detect_indicator_type(indicator_value)
except Exception as e:
assert str(e) == "Missing tldextract module, In order to use the auto detect function please" \
" use a docker image with it installed such as: demisto/jmespath"
def test_auto_detect_indicator_type_tldextract(mocker):
"""
Given
tldextract version is lower than 3.0.0
When
Trying to detect the type of an indicator.
Then
Run the auto_detect_indicator_type and validate that tldextract using `cache_file` arg and not `cache_dir`
"""
if sys.version_info.major == 3 and sys.version_info.minor >= 8:
import tldextract as tlde
tlde.__version__ = '2.2.7'
mocker.patch.object(tlde, 'TLDExtract')
auto_detect_indicator_type('8')
res = tlde.TLDExtract.call_args
assert 'cache_file' in res[1].keys()
def test_handle_proxy(mocker):
os.environ['REQUESTS_CA_BUNDLE'] = '/test1.pem'
mocker.patch.object(demisto, 'params', return_value={'insecure': True})
handle_proxy()
assert os.getenv('REQUESTS_CA_BUNDLE') is None
os.environ['REQUESTS_CA_BUNDLE'] = '/test2.pem'
mocker.patch.object(demisto, 'params', return_value={})
handle_proxy()
assert os.environ['REQUESTS_CA_BUNDLE'] == '/test2.pem' # make sure no change
mocker.patch.object(demisto, 'params', return_value={'unsecure': True})
handle_proxy()
assert os.getenv('REQUESTS_CA_BUNDLE') is None
def test_handle_proxy_without_http_prefix():
"""
Given
proxy is configured in environment vars without http/https prefixes
When
run handle_proxy()
Then
the function will return proxies with http:// prefix
"""
os.environ['HTTP_PROXY'] = 'testproxy:8899'
os.environ['HTTPS_PROXY'] = 'testproxy:8899'
proxies = handle_proxy(checkbox_default_value=True)
assert proxies['http'] == 'http://testproxy:8899'
assert proxies['https'] == 'http://testproxy:8899'
def test_handle_proxy_with_http_prefix():
"""
Given
proxy is configured in environment vars with http/https prefixes
When
run handle_proxy()
Then
the function will return proxies unchanged
"""
os.environ['HTTP_PROXY'] = 'http://testproxy:8899'
os.environ['HTTPS_PROXY'] = 'https://testproxy:8899'
proxies = handle_proxy(checkbox_default_value=True)
assert proxies['http'] == 'http://testproxy:8899'
assert proxies['https'] == 'https://testproxy:8899'
def test_handle_proxy_with_socks5_prefix():
"""
Given
proxy is configured in environment vars with socks5 (socks proxy) prefixes
When
run handle_proxy()
Then
the function will return proxies unchanged
"""
os.environ['HTTP_PROXY'] = 'socks5://testproxy:8899'
os.environ['HTTPS_PROXY'] = 'socks5://testproxy:8899'
proxies = handle_proxy(checkbox_default_value=True)
assert proxies['http'] == 'socks5://testproxy:8899'
assert proxies['https'] == 'socks5://testproxy:8899'
@pytest.mark.parametrize(argnames="dict_obj, keys, expected, default_return_value",
argvalues=[
({'a': '1'}, ['a'], '1', None),
({'a': {'b': '2'}}, ['a', 'b'], '2', None),
({'a': {'b': '2'}}, ['a', 'c'], 'test', 'test'),
])
def test_safe_get(dict_obj, keys, expected, default_return_value):
from CommonServerPython import dict_safe_get
assert expected == dict_safe_get(dict_object=dict_obj,
keys=keys,
default_return_value=default_return_value)
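# MIRRORS and CONVERSATIONS below are sample Slack-style payloads, kept as JSON strings the way an
# integration context stores them; they feed the versioned integration-context tests that follow.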
MIRRORS = '''
[{
"channel_id":"GKQ86DVPH",
"channel_name": "incident-681",
"channel_topic": "incident-681",
"investigation_id":"681",
"mirror_type":"all",
"mirror_direction":"both",
"mirror_to":"group",
"auto_close":true,
"mirrored":true
},
{
"channel_id":"GKB19PA3V",
"channel_name": "group2",
"channel_topic": "cooltopic",
"investigation_id":"684",
"mirror_type":"all",
"mirror_direction":"both",
"mirror_to":"group",
"auto_close":true,
"mirrored":true
},
{
"channel_id":"GKB19PA3V",
"channel_name": "group2",
"channel_topic": "cooltopic",
"investigation_id":"692",
"mirror_type":"all",
"mirror_direction":"both",
"mirror_to":"group",
"auto_close":true,
"mirrored":true
},
{
"channel_id":"GKNEJU4P9",
"channel_name": "group3",
"channel_topic": "incident-713",
"investigation_id":"713",
"mirror_type":"all",
"mirror_direction":"both",
"mirror_to":"group",
"auto_close":true,
"mirrored":true
},
{
"channel_id":"GL8GHC0LV",
"channel_name": "group5",
"channel_topic": "incident-734",
"investigation_id":"734",
"mirror_type":"all",
"mirror_direction":"both",
"mirror_to":"group",
"auto_close":true,
"mirrored":true
}]
'''
CONVERSATIONS = '''[{
"id": "C012AB3CD",
"name": "general",
"is_channel": true,
"is_group": false,
"is_im": false,
"created": 1449252889,
"creator": "U012A3CDE",
"is_archived": false,
"is_general": true,
"unlinked": 0,
"name_normalized": "general",
"is_shared": false,
"is_ext_shared": false,
"is_org_shared": false,
"pending_shared": [],
"is_pending_ext_shared": false,
"is_member": true,
"is_private": false,
"is_mpim": false,
"topic": {
"value": "Company-wide announcements and work-based matters",
"creator": "",
"last_set": 0
},
"purpose": {
"value": "This channel is for team-wide communication and announcements. All team members are in this channel.",
"creator": "",
"last_set": 0
},
"previous_names": [],
"num_members": 4
},
{
"id": "C061EG9T2",
"name": "random",
"is_channel": true,
"is_group": false,
"is_im": false,
"created": 1449252889,
"creator": "U061F7AUR",
"is_archived": false,
"is_general": false,
"unlinked": 0,
"name_normalized": "random",
"is_shared": false,
"is_ext_shared": false,
"is_org_shared": false,
"pending_shared": [],
"is_pending_ext_shared": false,
"is_member": true,
"is_private": false,
"is_mpim": false,
"topic": {
"value": "Non-work banter and water cooler conversation",
"creator": "",
"last_set": 0
},
"purpose": {
"value": "A place for non-work-related flimflam.",
"creator": "",
"last_set": 0
},
"previous_names": [],
"num_members": 4
}]'''
OBJECTS_TO_KEYS = {
'mirrors': 'investigation_id',
'questions': 'entitlement',
'users': 'id'
}
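# Maps each integration-context key to the field used as the merge key by update_integration_context.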
def set_integration_context_versioned(integration_context, version=-1, sync=False):
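    """Test double for the server-side versioned context store: keeps a module-level
    {'context': ..., 'version': ...} dict, bumps the version on every write and raises on a version
    conflict. The sync argument is accepted only for signature parity and is ignored."""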
global INTEGRATION_CONTEXT_VERSIONED
try:
if not INTEGRATION_CONTEXT_VERSIONED:
INTEGRATION_CONTEXT_VERSIONED = {'context': '{}', 'version': 0}
except NameError:
INTEGRATION_CONTEXT_VERSIONED = {'context': '{}', 'version': 0}
current_version = INTEGRATION_CONTEXT_VERSIONED['version']
if version != -1 and version <= current_version:
raise ValueError('DB Insert version {} does not match version {}'.format(current_version, version))
INTEGRATION_CONTEXT_VERSIONED = {'context': integration_context, 'version': current_version + 1}
def get_integration_context_versioned(refresh=False):
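    """Return the module-level versioned context test double (the refresh flag is ignored)."""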
return INTEGRATION_CONTEXT_VERSIONED
def test_merge_lists():
from CommonServerPython import merge_lists
# Set
original = [{'id': '1', 'updated': 'n'}, {'id': '2', 'updated': 'n'}, {'id': '11', 'updated': 'n'}]
updated = [{'id': '1', 'updated': 'y'}, {'id': '3', 'updated': 'y'}, {'id': '11', 'updated': 'n', 'remove': True}]
expected = [{'id': '1', 'updated': 'y'}, {'id': '2', 'updated': 'n'}, {'id': '3', 'updated': 'y'}]
# Arrange
result = merge_lists(original, updated, 'id')
# Assert
assert len(result) == len(expected)
for obj in result:
assert obj in expected
@pytest.mark.parametrize('version, expected',
[
({'version': '5.5.0'}, False),
({'version': '6.0.0'}, True),
]
)
def test_is_versioned_context_available(mocker, version, expected):
from CommonServerPython import is_versioned_context_available
# Set
mocker.patch.object(demisto, 'demistoVersion', return_value=version)
# Arrange
result = is_versioned_context_available()
get_demisto_version._version = None
# Assert
assert expected == result
def test_update_context_merge(mocker):
import CommonServerPython
# Set
set_integration_context_versioned({
'mirrors': MIRRORS,
'conversations': CONVERSATIONS
})
mocker.patch.object(demisto, 'getIntegrationContextVersioned', return_value=get_integration_context_versioned())
    mocker.patch.object(demisto, 'setIntegrationContextVersioned', side_effect=set_integration_context_versioned)
mocker.patch.object(CommonServerPython, 'is_versioned_context_available', return_value=True)
new_mirror = {
'channel_id': 'new_group',
'channel_name': 'incident-999',
'channel_topic': 'incident-999',
'investigation_id': '999',
'mirror_type': 'all',
'mirror_direction': 'both',
'mirror_to': 'group',
'auto_close': True,
'mirrored': False
}
mirrors = json.loads(MIRRORS)
mirrors.extend([new_mirror])
# Arrange
context, version = CommonServerPython.update_integration_context({'mirrors': [new_mirror]}, OBJECTS_TO_KEYS, True)
new_mirrors = json.loads(context['mirrors'])
# Assert
assert len(mirrors) == len(new_mirrors)
for mirror in mirrors:
assert mirror in new_mirrors
assert version == get_integration_context_versioned()['version']
def test_update_context_no_merge(mocker):
import CommonServerPython
# Set
set_integration_context_versioned({
'mirrors': MIRRORS,
'conversations': CONVERSATIONS
})
mocker.patch.object(demisto, 'getIntegrationContextVersioned', return_value=get_integration_context_versioned())
    mocker.patch.object(demisto, 'setIntegrationContextVersioned', side_effect=set_integration_context_versioned)
mocker.patch.object(CommonServerPython, 'is_versioned_context_available', return_value=True)
new_conversation = {
'id': 'A0123456',
'name': 'general'
}
conversations = json.loads(CONVERSATIONS)
conversations.extend([new_conversation])
# Arrange
context, version = CommonServerPython.update_integration_context({'conversations': conversations}, OBJECTS_TO_KEYS,
True)
new_conversations = json.loads(context['conversations'])
# Assert
assert conversations == new_conversations
assert version == get_integration_context_versioned()['version']
@pytest.mark.parametrize('versioned_available', [True, False])
def test_get_latest_integration_context(mocker, versioned_available):
import CommonServerPython
# Set
set_integration_context_versioned({
'mirrors': MIRRORS,
'conversations': CONVERSATIONS
})
mocker.patch.object(demisto, 'getIntegrationContextVersioned', return_value=get_integration_context_versioned())
    mocker.patch.object(demisto, 'setIntegrationContextVersioned', side_effect=set_integration_context_versioned)
mocker.patch.object(CommonServerPython, 'is_versioned_context_available', return_value=versioned_available)
mocker.patch.object(demisto, 'getIntegrationContext',
return_value={'mirrors': MIRRORS, 'conversations': CONVERSATIONS})
# Arrange
context, ver = CommonServerPython.get_integration_context_with_version(True)
# Assert
assert context == get_integration_context_versioned()['context']
    assert ver == (get_integration_context_versioned()['version'] if versioned_available else -1)
def test_set_latest_integration_context(mocker):
import CommonServerPython
# Set
set_integration_context_versioned({
'mirrors': MIRRORS,
'conversations': CONVERSATIONS,
})
mocker.patch.object(demisto, 'getIntegrationContextVersioned', return_value=get_integration_context_versioned())
    mocker.patch.object(demisto, 'setIntegrationContextVersioned', side_effect=set_integration_context_versioned)
int_context = get_integration_context_versioned()
mocker.patch.object(CommonServerPython, 'update_integration_context',
side_effect=[(int_context['context'], int_context['version']),
(int_context['context'], int_context['version'] + 1)])
mocker.patch.object(CommonServerPython, 'set_integration_context', side_effect=[ValueError, int_context['context']])
# Arrange
CommonServerPython.set_to_integration_context_with_retries({}, OBJECTS_TO_KEYS)
int_context_calls = CommonServerPython.set_integration_context.call_count
int_context_args_1 = CommonServerPython.set_integration_context.call_args_list[0][0]
int_context_args_2 = CommonServerPython.set_integration_context.call_args_list[1][0]
# Assert
assert int_context_calls == 2
assert int_context_args_1 == (int_context['context'], True, int_context['version'])
assert int_context_args_2 == (int_context['context'], True, int_context['version'] + 1)
def test_set_latest_integration_context_es(mocker):
import CommonServerPython
# Set
mocker.patch.object(demisto, 'getIntegrationContextVersioned', return_value=get_integration_context_versioned())
    mocker.patch.object(demisto, 'setIntegrationContextVersioned', side_effect=set_integration_context_versioned)
es_inv_context_version_first = {'version': 5, 'sequenceNumber': 807, 'primaryTerm': 1}
es_inv_context_version_second = {'version': 7, 'sequenceNumber': 831, 'primaryTerm': 1}
mocker.patch.object(CommonServerPython, 'update_integration_context',
side_effect=[({}, es_inv_context_version_first),
({}, es_inv_context_version_second)])
mocker.patch.object(CommonServerPython, 'set_integration_context', side_effect=[ValueError, {}])
# Arrange
CommonServerPython.set_to_integration_context_with_retries({})
int_context_calls = CommonServerPython.set_integration_context.call_count
int_context_args_1 = CommonServerPython.set_integration_context.call_args_list[0][0]
int_context_args_2 = CommonServerPython.set_integration_context.call_args_list[1][0]
# Assert
assert int_context_calls == 2
assert int_context_args_1[1:] == (True, es_inv_context_version_first)
assert int_context_args_2[1:] == (True, es_inv_context_version_second)
def test_set_latest_integration_context_fail(mocker):
import CommonServerPython
# Set
set_integration_context_versioned({
'mirrors': MIRRORS,
'conversations': CONVERSATIONS,
})
mocker.patch.object(demisto, 'getIntegrationContextVersioned', return_value=get_integration_context_versioned())
    mocker.patch.object(demisto, 'setIntegrationContextVersioned', side_effect=set_integration_context_versioned)
int_context = get_integration_context_versioned()
mocker.patch.object(CommonServerPython, 'update_integration_context', return_value=(
int_context['context'], int_context['version']
))
mocker.patch.object(CommonServerPython, 'set_integration_context', side_effect=ValueError)
# Arrange
with pytest.raises(Exception):
CommonServerPython.set_to_integration_context_with_retries({}, OBJECTS_TO_KEYS)
int_context_calls = CommonServerPython.set_integration_context.call_count
# Assert
assert int_context_calls == CommonServerPython.CONTEXT_UPDATE_RETRY_TIMES
def test_get_x_content_info_headers(mocker):
test_license = 'TEST_LICENSE_ID'
test_brand = 'TEST_BRAND'
mocker.patch.object(
demisto,
'getLicenseID',
return_value=test_license
)
mocker.patch.object(
demisto,
'callingContext',
new_callable=mocker.PropertyMock(return_value={'context': {
'IntegrationBrand': test_brand,
'IntegrationInstance': 'TEST_INSTANCE',
}})
)
headers = get_x_content_info_headers()
assert headers['X-Content-LicenseID'] == test_license
assert headers['X-Content-Name'] == test_brand
def test_return_results_multiple_command_results(mocker):
"""
Given:
- List of 2 CommandResult
When:
- Calling return_results()
Then:
- demisto.results() is called 2 times (with the list items)
"""
from CommonServerPython import CommandResults, return_results
demisto_results_mock = mocker.patch.object(demisto, 'results')
mock_command_results = []
for i in range(2):
mock_output = {'MockContext': i}
mock_command_results.append(CommandResults(outputs_prefix='Mock', outputs=mock_output))
return_results(mock_command_results)
assert demisto_results_mock.call_count == 2
def test_return_results_multiple_dict_results(mocker):
"""
Given:
- List of 2 dictionaries
When:
- Calling return_results()
Then:
- demisto.results() is called 1 time (with the list as an argument)
"""
from CommonServerPython import return_results
demisto_results_mock = mocker.patch.object(demisto, 'results')
mock_command_results = [{'MockContext': 0}, {'MockContext': 1}]
return_results(mock_command_results)
args, _ = demisto_results_mock.call_args_list[0]
assert demisto_results_mock.call_count == 1
assert [{'MockContext': 0}, {'MockContext': 1}] in args
def test_return_results_mixed_results(mocker):
"""
Given:
- List containing a CommandResult object and two dictionaries (representing a demisto result entries)
When:
- Calling return_results()
Then:
        - Assert that demisto.results() is called 2 times.
- Assert that the first call was with the CommandResult object.
- Assert that the second call was with the two demisto results dicts.
"""
from CommonServerPython import CommandResults, return_results
demisto_results_mock = mocker.patch.object(demisto, 'results')
mock_command_results_object = CommandResults(outputs_prefix='Mock', outputs={'MockContext': 0})
mock_demisto_results_entry = [{'MockContext': 1}, {'MockContext': 2}]
return_results([mock_command_results_object] + mock_demisto_results_entry)
assert demisto_results_mock.call_count == 2
assert demisto_results_mock.call_args_list[0][0][0] == mock_command_results_object.to_context()
assert demisto_results_mock.call_args_list[1][0][0] == mock_demisto_results_entry
class TestExecuteCommand:
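    """Tests for execute_command: Contents extraction, raw entries (extract_contents=False),
    error handling, and the (status, result) tuple returned when fail_on_error=False."""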
@staticmethod
def test_sanity(mocker):
"""
Given:
- A successful command with a single entry as output.
When:
- Calling execute_command.
Then:
- Assert that only the Contents value is returned.
"""
from CommonServerPython import execute_command, EntryType
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=[{'Type': EntryType.NOTE,
'Contents': {'hello': 'world'}}])
res = execute_command('command', {'arg1': 'value'})
execute_command_args = demisto_execute_mock.call_args_list[0][0]
assert demisto_execute_mock.call_count == 1
assert execute_command_args[0] == 'command'
assert execute_command_args[1] == {'arg1': 'value'}
assert res == {'hello': 'world'}
@staticmethod
def test_multiple_results(mocker):
"""
Given:
- A successful command with several entries as output.
When:
- Calling execute_command.
Then:
- Assert that the "Contents" values of all entries are returned.
"""
from CommonServerPython import execute_command, EntryType
entries = [
{'Type': EntryType.NOTE, 'Contents': {'hello': 'world'}},
{'Type': EntryType.NOTE, 'Context': 'no contents here'},
{'Type': EntryType.NOTE, 'Contents': {'entry': '2'}},
]
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=entries)
res = execute_command('command', {'arg1': 'value'})
assert demisto_execute_mock.call_count == 1
assert isinstance(res, list)
assert len(res) == 3
assert res[0] == {'hello': 'world'}
assert res[1] == {}
assert res[2] == {'entry': '2'}
@staticmethod
def test_raw_results(mocker):
"""
Given:
- A successful command with several entries as output.
When:
- Calling execute_command.
Then:
        - Assert that the entries are returned whole, without Contents extraction.
"""
from CommonServerPython import execute_command, EntryType
entries = [
{'Type': EntryType.NOTE, 'Contents': {'hello': 'world'}},
{'Type': EntryType.NOTE, 'Context': 'no contents here'},
'text',
1337,
]
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=entries)
res = execute_command('command', {'arg1': 'value'}, extract_contents=False)
assert demisto_execute_mock.call_count == 1
assert isinstance(res, list)
assert len(res) == 4
assert res[0] == {'Type': EntryType.NOTE, 'Contents': {'hello': 'world'}}
assert res[1] == {'Type': EntryType.NOTE, 'Context': 'no contents here'}
assert res[2] == 'text'
assert res[3] == 1337
@staticmethod
def test_failure(mocker):
"""
Given:
- A command that fails.
When:
- Calling execute_command.
Then:
        - Assert that the original error entries are returned to the War Room (via demisto.results).
        - Assert that the function ends the run with SystemExit.
"""
from CommonServerPython import execute_command, EntryType
error_entries = [
{'Type': EntryType.ERROR, 'Contents': 'error number 1'},
{'Type': EntryType.NOTE, 'Contents': 'not an error'},
{'Type': EntryType.ERROR, 'Contents': 'error number 2'},
]
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=error_entries)
demisto_results_mock = mocker.patch.object(demisto, 'results')
with raises(SystemExit, match='0'):
execute_command('bad', {'arg1': 'value'})
assert demisto_execute_mock.call_count == 1
assert demisto_results_mock.call_count == 1
# first call, args (not kwargs), first argument
error_text = demisto_results_mock.call_args_list[0][0][0]['Contents']
assert 'Failed to execute bad.' in error_text
assert 'error number 1' in error_text
assert 'error number 2' in error_text
assert 'not an error' not in error_text
@staticmethod
def test_failure_integration(monkeypatch):
from CommonServerPython import execute_command, EntryType
monkeypatch.delattr(demisto, 'executeCommand')
with raises(DemistoException, match=r'Cannot run demisto.executeCommand\(\) from integrations.'):
execute_command('bad', {'arg1': 'value'})
@staticmethod
def test_multiple_results_fail_on_error_false(mocker):
"""
Given:
- A successful command with several entries as output.
- fail_on_error set to False.
When:
- Calling execute_command.
Then:
- Assert that the status of the execution is True for successful run.
- Assert that the "Contents" values of all entries are returned.
"""
from CommonServerPython import execute_command, EntryType
entries = [
{'Type': EntryType.NOTE, 'Contents': {'hello': 'world'}},
{'Type': EntryType.NOTE, 'Context': 'no contents here'},
{'Type': EntryType.NOTE, 'Contents': {'entry': '2'}},
]
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=entries)
status, res = execute_command('command', {'arg1': 'value'}, fail_on_error=False)
assert demisto_execute_mock.call_count == 1
assert isinstance(res, list)
assert len(res) == 3
assert status
assert res[0] == {'hello': 'world'}
assert res[1] == {}
assert res[2] == {'entry': '2'}
@staticmethod
def test_raw_results_fail_on_error_false(mocker):
"""
Given:
- A successful command with several entries as output.
- fail_on_error set to False.
When:
- Calling execute_command.
Then:
- Assert that the status of the execution is True for successful run.
        - Assert that the entries are returned whole, without Contents extraction.
"""
from CommonServerPython import execute_command, EntryType
entries = [
{'Type': EntryType.NOTE, 'Contents': {'hello': 'world'}},
{'Type': EntryType.NOTE, 'Context': 'no contents here'},
'text',
1337,
]
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=entries)
status, res = execute_command('command', {'arg1': 'value'}, extract_contents=False, fail_on_error=False)
assert demisto_execute_mock.call_count == 1
assert isinstance(res, list)
assert len(res) == 4
assert status
assert res[0] == {'Type': EntryType.NOTE, 'Contents': {'hello': 'world'}}
assert res[1] == {'Type': EntryType.NOTE, 'Context': 'no contents here'}
assert res[2] == 'text'
assert res[3] == 1337
@staticmethod
def test_failure_fail_on_error_false(mocker):
"""
Given:
- A command that fails.
- fail_on_error set to False.
When:
- Calling execute_command.
Then:
- Assert that the status of the execution is False for failed run.
        - Assert that the original errors are returned as a value rather than sent to the War Room.
"""
from CommonServerPython import execute_command, EntryType
error_entries = [
{'Type': EntryType.ERROR, 'Contents': 'error number 1'},
{'Type': EntryType.NOTE, 'Contents': 'not an error'},
{'Type': EntryType.ERROR, 'Contents': 'error number 2'},
]
demisto_execute_mock = mocker.patch.object(demisto, 'executeCommand',
return_value=error_entries)
demisto_results_mock = mocker.patch.object(demisto, 'results')
status, error_text = execute_command('bad', {'arg1': 'value'}, fail_on_error=False)
assert demisto_execute_mock.call_count == 1
assert demisto_results_mock.call_count == 0
assert not status
assert 'error number 1' in error_text
assert 'error number 2' in error_text
assert 'not an error' not in error_text
def test_arg_to_int__valid_numbers():
"""
Given
valid numbers
When
converting them to int
Then
        ensure the proper int is returned
"""
from CommonServerPython import arg_to_number
result = arg_to_number(
arg='5',
arg_name='foo')
assert result == 5
result = arg_to_number(
arg='2.0',
arg_name='foo')
assert result == 2
result = arg_to_number(
arg=3,
arg_name='foo')
assert result == 3
result = arg_to_number(
arg=4,
arg_name='foo',
required=True)
assert result == 4
result = arg_to_number(
arg=5,
required=True)
assert result == 5
def test_arg_to_int__invalid_numbers():
"""
Given
invalid numbers
When
converting them to int
Then
raise ValueError
"""
from CommonServerPython import arg_to_number
try:
arg_to_number(
arg='aa',
arg_name='foo')
assert False
except ValueError as e:
assert 'Invalid number' in str(e)
def test_arg_to_int_required():
"""
Given
        argument foo with value None
    When
        converting the arg to a number with the required flag set to True
    Then
        ensure a ValueError is raised
"""
from CommonServerPython import arg_to_number
# required set to false
result = arg_to_number(
arg=None,
arg_name='foo',
required=False)
assert result is None
try:
arg_to_number(
arg=None,
arg_name='foo',
required=True)
assert False
except ValueError as e:
assert 'Missing' in str(e)
try:
arg_to_number(
arg='',
arg_name='foo',
required=True)
assert False
except ValueError as e:
assert 'Missing' in str(e)
try:
arg_to_number(arg='goo')
assert False
except ValueError as e:
assert '"goo" is not a valid number' in str(e)
def test_arg_to_timestamp_valid_inputs():
"""
Given
        valid date inputs
    When
        converting them with arg_to_datetime
    Then
        ensure a datetime object representing the given date is returned
"""
if sys.version_info.major == 2:
# skip for python 2 - date
assert True
return
from CommonServerPython import arg_to_datetime
from datetime import datetime, timezone
# hard coded date
result = arg_to_datetime(
arg='2020-11-10T21:43:43Z',
arg_name='foo'
)
assert result == datetime(2020, 11, 10, 21, 43, 43, tzinfo=timezone.utc)
# relative dates also work
result = arg_to_datetime(
arg='2 hours ago',
arg_name='foo'
)
assert result > datetime(2020, 11, 10, 21, 43, 43)
    # epoch timestamps (in seconds) also work
result = arg_to_datetime(
arg=1581982463,
arg_name='foo'
)
assert int(result.timestamp()) == 1581982463
result = arg_to_datetime(
arg='2 hours ago'
)
assert result > datetime(2020, 11, 10, 21, 43, 43)
def test_arg_to_timestamp_invalid_inputs():
"""
Given
invalid date like 'aaaa' or '2010-32-01'
When
        converting the date with arg_to_datetime
Then
ensure ValueError is raised
"""
from CommonServerPython import arg_to_datetime
if sys.version_info.major == 2:
# skip for python 2 - date
assert True
return
try:
arg_to_datetime(
arg=None,
arg_name='foo',
required=True)
assert False
except ValueError as e:
assert 'Missing' in str(e)
try:
arg_to_datetime(
arg='aaaa',
arg_name='foo')
assert False
except ValueError as e:
assert 'Invalid date' in str(e)
try:
arg_to_datetime(
arg='2010-32-01',
arg_name='foo')
assert False
except ValueError as e:
assert 'Invalid date' in str(e)
try:
arg_to_datetime(
arg='2010-32-01')
assert False
except ValueError as e:
assert '"2010-32-01" is not a valid date' in str(e)
def test_warnings_handler(mocker):
mocker.patch.object(demisto, 'info')
    # need to initialize WarningsHandler since pytest overrides the warnings handler
with pytest.warns(RuntimeWarning) as r:
warnings.warn("without handler", RuntimeWarning)
handler = WarningsHandler() # noqa
warnings.warn("This is a test", RuntimeWarning)
assert len(r) == 1
assert str(r[0].message) == "without handler"
    # call_args is a tuple of (args, kwargs); we only need the args
msg = demisto.info.call_args[0][0]
assert 'This is a test' in msg
assert 'python warning' in msg
def test_get_schedule_metadata():
"""
Given
- case 1: no parent entry
- case 2: parent entry with schedule metadata
- case 3: parent entry without schedule metadata
When
querying the schedule metadata
Then
ensure scheduled_metadata is returned correctly
- case 1: no data (empty dict)
- case 2: schedule metadata with all details
- case 3: empty schedule metadata (dict with polling: false)
"""
from CommonServerPython import get_schedule_metadata
# case 1
context = {'ParentEntry': None}
actual_scheduled_metadata = get_schedule_metadata(context=context)
assert actual_scheduled_metadata == {}
# case 2
parent_entry = {
'polling': True,
'pollingCommand': 'foo',
'pollingArgs': {'name': 'foo'},
'timesRan': 5,
'startDate': '2021-04-28T14:20:56.03728+03:00',
'endingDate': '2021-04-28T14:25:35.976244+03:00'
}
context = {
'ParentEntry': parent_entry
}
actual_scheduled_metadata = get_schedule_metadata(context=context)
assert actual_scheduled_metadata.get('is_polling') is True
assert actual_scheduled_metadata.get('polling_command') == parent_entry.get('pollingCommand')
assert actual_scheduled_metadata.get('polling_args') == parent_entry.get('pollingArgs')
assert actual_scheduled_metadata.get('times_ran') == (parent_entry.get('timesRan') + 1)
    assert actual_scheduled_metadata.get('start_date') == parent_entry.get('startDate')
    assert actual_scheduled_metadata.get('end_date') == parent_entry.get('endingDate')
# case 3
parent_entry = {
'polling': False
}
context = {
'ParentEntry': parent_entry
}
actual_scheduled_metadata = get_schedule_metadata(context=context)
assert actual_scheduled_metadata == {'is_polling': False, 'times_ran': 1}
class TestCommonTypes:
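    """Tests for the Common.* standard-context indicator classes (Domain, Certificate, EMAIL and
    others) and the entry context that CommandResults builds from them."""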
def test_create_domain(self):
from CommonServerPython import CommandResults, Common, EntryType, EntryFormat, DBotScoreType
dbot_score = Common.DBotScore(
indicator='somedomain.com',
integration_name='Test',
indicator_type=DBotScoreType.DOMAIN,
score=Common.DBotScore.GOOD
)
domain = Common.Domain(
domain='somedomain.com',
dbot_score=dbot_score,
dns='dns.somedomain',
detection_engines=10,
positive_detections=5,
organization='Some Organization',
admin_phone='18000000',
admin_email='admin@test.com',
registrant_name='Mr Registrant',
registrar_name='Mr Registrar',
registrar_abuse_email='registrar@test.com',
creation_date='2019-01-01T00:00:00',
updated_date='2019-01-02T00:00:00',
expiration_date=None,
domain_status='ACTIVE',
name_servers=[
'PNS31.CLOUDNS.NET',
'PNS32.CLOUDNS.NET'
],
sub_domains=[
'sub-domain1.somedomain.com',
'sub-domain2.somedomain.com',
'sub-domain3.somedomain.com'
],
tags=['tag1', 'tag2'],
malware_family=['malware_family1', 'malware_family2'],
feed_related_indicators=[Common.FeedRelatedIndicators(
value='8.8.8.8',
indicator_type="IP",
description='test'
)],
domain_idn_name='domain_idn_name',
port='port',
internal="False",
category='category',
campaign='campaign',
traffic_light_protocol='traffic_light_protocol',
threat_types=[Common.ThreatTypes(threat_category='threat_category',
threat_category_confidence='threat_category_confidence')],
community_notes=[Common.CommunityNotes(note='note', timestamp='2019-01-01T00:00:00')],
publications=[Common.Publications(title='title', source='source', timestamp='2019-01-01T00:00:00',
link='link')],
geo_location='geo_location',
geo_country='geo_country',
geo_description='geo_description',
tech_country='tech_country',
tech_name='tech_name',
tech_organization='tech_organization',
tech_email='tech_email',
billing='billing'
)
results = CommandResults(
outputs_key_field=None,
outputs_prefix=None,
outputs=None,
indicators=[domain]
)
assert results.to_context() == {
'Type': 1,
'ContentsFormat': 'json',
'Contents': None,
'HumanReadable': None,
'EntryContext': {
'Domain(val.Name && val.Name == obj.Name)': [
{
"Name": "somedomain.com",
"DNS": "dns.somedomain",
"DetectionEngines": 10,
"PositiveDetections": 5,
"Registrar": {
"Name": "Mr Registrar",
"AbuseEmail": "registrar@test.com",
"AbusePhone": None
},
"Registrant": {
"Name": "Mr Registrant",
"Email": None,
"Phone": None,
"Country": None
},
"Admin": {
"Name": None,
"Email": "admin@test.com",
"Phone": "18000000",
"Country": None
},
"Organization": "Some Organization",
"Subdomains": [
"sub-domain1.somedomain.com",
"sub-domain2.somedomain.com",
"sub-domain3.somedomain.com"
],
"DomainStatus": "ACTIVE",
"CreationDate": "2019-01-01T00:00:00",
"UpdatedDate": "2019-01-02T00:00:00",
"NameServers": [
"PNS31.CLOUDNS.NET",
"PNS32.CLOUDNS.NET"
],
"Tags": ["tag1", "tag2"],
"FeedRelatedIndicators": [{"value": "8.8.8.8", "type": "IP", "description": "test"}],
"MalwareFamily": ["malware_family1", "malware_family2"],
"DomainIDNName": "domain_idn_name",
"Port": "port",
"Internal": "False",
"Category": "category",
"Campaign": "campaign",
"TrafficLightProtocol": "traffic_light_protocol",
"ThreatTypes": [{
"threatcategory": "threat_category",
"threatcategoryconfidence": "threat_category_confidence"
}],
"CommunityNotes": [{
"note": "note",
"timestamp": "2019-01-01T00:00:00"
}],
"Publications": [{
"source": "source",
"title": "title",
"link": "link",
"timestamp": "2019-01-01T00:00:00"
}],
"Geo": {
"Location": "geo_location",
"Country": "geo_country",
"Description": "geo_description"
},
"Tech": {
"Country": "tech_country",
"Name": "tech_name",
"Organization": "tech_organization",
"Email": "tech_email"
},
"Billing": "billing",
"WHOIS": {
"Registrar": {
"Name": "Mr Registrar",
"AbuseEmail": "registrar@test.com",
"AbusePhone": None
},
"Registrant": {
"Name": "Mr Registrant",
"Email": None,
"Phone": None,
"Country": None
},
"Admin": {
"Name": None,
"Email": "admin@test.com",
"Phone": "18000000",
"Country": None
},
"DomainStatus": "ACTIVE",
"CreationDate": "2019-01-01T00:00:00",
"UpdatedDate": "2019-01-02T00:00:00",
"NameServers": [
"PNS31.CLOUDNS.NET",
"PNS32.CLOUDNS.NET"
]
}
}
],
'DBotScore(val.Indicator && val.Indicator == obj.Indicator && '
'val.Vendor == obj.Vendor && val.Type == obj.Type)': [
{
'Indicator': 'somedomain.com',
'Type': 'domain',
'Vendor': 'Test',
'Score': 1
}
]
},
'IndicatorTimeline': [],
'IgnoreAutoExtract': False,
'Note': False,
'Relationships': []
}
def test_create_certificate(self):
"""
Given:
- an X509 Certificate with its properties
When
- creating a CommandResults with the Certificate Standard Context
Then
- the proper output Context is created
"""
from CommonServerPython import CommandResults, Common, EntryType, EntryFormat, DBotScoreType
dbot_score = Common.DBotScore(
indicator='bc33cf76519f1ec5ae7f287f321df33a7afd4fd553f364cf3c753f91ba689f8d',
integration_name='Test',
indicator_type=DBotScoreType.CERTIFICATE,
score=Common.DBotScore.NONE
)
cert_extensions = [
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.AUTHORITYKEYIDENTIFIER,
authority_key_identifier=Common.CertificateExtension.AuthorityKeyIdentifier(
key_identifier="0f80611c823161d52f28e78d4638b42ce1c6d9e2"
),
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.SUBJECTKEYIDENTIFIER,
digest="b34972bb12121b8851cd5564ff9656dcbca3f288",
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.SUBJECTALTERNATIVENAME,
subject_alternative_names=[
Common.GeneralName(
gn_type="dNSName",
gn_value="*.paloaltonetworks.com"
),
Common.GeneralName(
gn_type="dNSName",
gn_value="paloaltonetworks.com"
)
],
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.KEYUSAGE,
digital_signature=True,
key_encipherment=True,
critical=True
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.EXTENDEDKEYUSAGE,
usages=[
"serverAuth",
"clientAuth"
],
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.CRLDISTRIBUTIONPOINTS,
distribution_points=[
Common.CertificateExtension.DistributionPoint(
full_name=[
Common.GeneralName(
gn_type="uniformResourceIdentifier",
gn_value="http://crl3.digicert.com/ssca-sha2-g7.crl"
)
]
),
Common.CertificateExtension.DistributionPoint(
full_name=[
Common.GeneralName(
gn_type="uniformResourceIdentifier",
gn_value="http://crl4.digicert.com/ssca-sha2-g7.crl"
)
]
)
],
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.CERTIFICATEPOLICIES,
certificate_policies=[
Common.CertificateExtension.CertificatePolicy(
policy_identifier="2.16.840.1.114412.1.1",
policy_qualifiers=["https://www.digicert.com/CPS"]
),
Common.CertificateExtension.CertificatePolicy(
policy_identifier="2.23.140.1.2.2"
)
],
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.AUTHORITYINFORMATIONACCESS,
authority_information_access=[
Common.CertificateExtension.AuthorityInformationAccess(
access_method="OCSP",
access_location=Common.GeneralName(
gn_type="uniformResourceIdentifier",
gn_value="http://ocsp.digicert.com"
)
),
Common.CertificateExtension.AuthorityInformationAccess(
access_method="caIssuers",
access_location=Common.GeneralName(
gn_type="uniformResourceIdentifier",
gn_value="http://cacerts.digicert.com/DigiCertSHA2SecureServerCA.crt"
)
)
],
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.BASICCONSTRAINTS,
basic_constraints=Common.CertificateExtension.BasicConstraints(
ca=False
),
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.PRESIGNEDCERTIFICATETIMESTAMPS,
signed_certificate_timestamps=[
Common.CertificateExtension.SignedCertificateTimestamp(
version=0,
log_id="f65c942fd1773022145418083094568ee34d131933bfdf0c2f200bcc4ef164e3",
timestamp="2020-10-23T19:31:49.000Z",
entry_type="PreCertificate"
),
Common.CertificateExtension.SignedCertificateTimestamp(
version=0,
log_id="5cdc4392fee6ab4544b15e9ad456e61037fbd5fa47dca17394b25ee6f6c70eca",
timestamp="2020-10-23T19:31:49.000Z",
entry_type="PreCertificate"
)
],
critical=False
),
Common.CertificateExtension(
extension_type=Common.CertificateExtension.ExtensionType.SIGNEDCERTIFICATETIMESTAMPS,
signed_certificate_timestamps=[
Common.CertificateExtension.SignedCertificateTimestamp(
version=0,
log_id="f65c942fd1773022145418083094568ee34d131933bfdf0c2f200bcc4ef164e3",
timestamp="2020-10-23T19:31:49.000Z",
entry_type="X509Certificate"
),
Common.CertificateExtension.SignedCertificateTimestamp(
version=0,
log_id="5cdc4392fee6ab4544b15e9ad456e61037fbd5fa47dca17394b25ee6f6c70eca",
timestamp="2020-10-23T19:31:49.000Z",
entry_type="X509Certificate"
)
],
critical=False
)
]
certificate = Common.Certificate(
subject_dn='CN=*.paloaltonetworks.com,O=Palo Alto Networks\\, Inc.,L=Santa Clara,ST=California,C=US',
dbot_score=dbot_score,
serial_number='19290688218337824112020565039390569720',
issuer_dn='CN=DigiCert SHA2 Secure Server CA,O=DigiCert Inc,C=US',
validity_not_before='2020-10-23T00:00:00.000Z',
validity_not_after='2021-11-21T23:59:59.000Z',
sha256='bc33cf76519f1ec5ae7f287f321df33a7afd4fd553f364cf3c753f91ba689f8d',
sha1='2392ea5cd4c2a61e51547570634ef887ab1942e9',
md5='22769ae413997b86da4a0934072d9ed0',
publickey=Common.CertificatePublicKey(
algorithm=Common.CertificatePublicKey.Algorithm.RSA,
length=2048,
modulus='00:00:00:00',
exponent=65537
),
spki_sha256='94b716aeda21cd661949cfbf3f55457a277da712cdce0ab31989a4f288fad9b9',
signature_algorithm='sha256',
signature='SIGNATURE',
extensions=cert_extensions
)
results = CommandResults(
outputs_key_field=None,
outputs_prefix=None,
outputs=None,
indicators=[certificate]
)
CONTEXT_PATH = "Certificate(val.MD5 && val.MD5 == obj.MD5 || val.SHA1 && val.SHA1 == obj.SHA1 || " \
"val.SHA256 && val.SHA256 == obj.SHA256 || val.SHA512 && val.SHA512 == obj.SHA512)"
assert results.to_context() == {
'Type': EntryType.NOTE,
'ContentsFormat': EntryFormat.JSON,
'Contents': None,
'HumanReadable': None,
'EntryContext': {
CONTEXT_PATH: [{
"SubjectDN": "CN=*.paloaltonetworks.com,O=Palo Alto Networks\\, Inc.,L=Santa Clara,ST=California,C=US",
"SubjectAlternativeName": [
{
"Type": "dNSName",
"Value": "*.paloaltonetworks.com"
},
{
"Type": "dNSName",
"Value": "paloaltonetworks.com"
}
],
"Name": [
"*.paloaltonetworks.com",
"paloaltonetworks.com"
],
"IssuerDN": "CN=DigiCert SHA2 Secure Server CA,O=DigiCert Inc,C=US",
"SerialNumber": "19290688218337824112020565039390569720",
"ValidityNotBefore": "2020-10-23T00:00:00.000Z",
"ValidityNotAfter": "2021-11-21T23:59:59.000Z",
"SHA256": "bc33cf76519f1ec5ae7f287f321df33a7afd4fd553f364cf3c753f91ba689f8d",
"SHA1": "2392ea5cd4c2a61e51547570634ef887ab1942e9",
"MD5": "22769ae413997b86da4a0934072d9ed0",
"PublicKey": {
"Algorithm": "RSA",
"Length": 2048,
"Modulus": "00:00:00:00",
"Exponent": 65537
},
"SPKISHA256": "94b716aeda21cd661949cfbf3f55457a277da712cdce0ab31989a4f288fad9b9",
"Signature": {
"Algorithm": "sha256",
"Signature": "SIGNATURE"
},
"Extension": [
{
"OID": "2.5.29.35",
"Name": "authorityKeyIdentifier",
"Critical": False,
"Value": {
"KeyIdentifier": "0f80611c823161d52f28e78d4638b42ce1c6d9e2"
}
},
{
"OID": "2.5.29.14",
"Name": "subjectKeyIdentifier",
"Critical": False,
"Value": {
"Digest": "b34972bb12121b8851cd5564ff9656dcbca3f288"
}
},
{
"OID": "2.5.29.17",
"Name": "subjectAltName",
"Critical": False,
"Value": [
{
"Type": "dNSName",
"Value": "*.paloaltonetworks.com"
},
{
"Type": "dNSName",
"Value": "paloaltonetworks.com"
}
]
},
{
"OID": "2.5.29.15",
"Name": "keyUsage",
"Critical": True,
"Value": {
"DigitalSignature": True,
"KeyEncipherment": True
}
},
{
"OID": "2.5.29.37",
"Name": "extendedKeyUsage",
"Critical": False,
"Value": {
"Usages": [
"serverAuth",
"clientAuth"
]
}
},
{
"OID": "2.5.29.31",
"Name": "cRLDistributionPoints",
"Critical": False,
"Value": [
{
"FullName": [
{
"Type": "uniformResourceIdentifier",
"Value": "http://crl3.digicert.com/ssca-sha2-g7.crl"
}
]
},
{
"FullName": [
{
"Type": "uniformResourceIdentifier",
"Value": "http://crl4.digicert.com/ssca-sha2-g7.crl"
}
]
}
]
},
{
"OID": "2.5.29.32",
"Name": "certificatePolicies",
"Critical": False,
"Value": [
{
"PolicyIdentifier": "2.16.840.1.114412.1.1",
"PolicyQualifiers": [
"https://www.digicert.com/CPS"
]
},
{
"PolicyIdentifier": "2.23.140.1.2.2"
}
]
},
{
"OID": "1.3.6.1.5.5.7.1.1",
"Name": "authorityInfoAccess",
"Critical": False,
"Value": [
{
"AccessMethod": "OCSP",
"AccessLocation": {
"Type": "uniformResourceIdentifier",
"Value": "http://ocsp.digicert.com"
}
},
{
"AccessMethod": "caIssuers",
"AccessLocation": {
"Type": "uniformResourceIdentifier",
"Value": "http://cacerts.digicert.com/DigiCertSHA2SecureServerCA.crt"
}
}
]
},
{
"OID": "2.5.29.19",
"Name": "basicConstraints",
"Critical": False,
"Value": {
"CA": False
}
},
{
"OID": "1.3.6.1.4.1.11129.2.4.2",
"Name": "signedCertificateTimestampList",
"Critical": False,
"Value": [
{
"Version": 0,
"LogId": "f65c942fd1773022145418083094568ee34d131933bfdf0c2f200bcc4ef164e3",
"Timestamp": "2020-10-23T19:31:49.000Z",
"EntryType": "PreCertificate"
},
{
"Version": 0,
"LogId": "5cdc4392fee6ab4544b15e9ad456e61037fbd5fa47dca17394b25ee6f6c70eca",
"Timestamp": "2020-10-23T19:31:49.000Z",
"EntryType": "PreCertificate"
}
]
},
{
"OID": "1.3.6.1.4.1.11129.2.4.5",
"Name": "signedCertificateTimestampList",
"Critical": False,
"Value": [
{
"Version": 0,
"LogId": "f65c942fd1773022145418083094568ee34d131933bfdf0c2f200bcc4ef164e3",
"Timestamp": "2020-10-23T19:31:49.000Z",
"EntryType": "X509Certificate"
},
{
"Version": 0,
"LogId": "5cdc4392fee6ab4544b15e9ad456e61037fbd5fa47dca17394b25ee6f6c70eca",
"Timestamp": "2020-10-23T19:31:49.000Z",
"EntryType": "X509Certificate"
}
]
}
]
}],
'DBotScore(val.Indicator && val.Indicator == obj.Indicator && '
'val.Vendor == obj.Vendor && val.Type == obj.Type)': [{
"Indicator": "bc33cf76519f1ec5ae7f287f321df33a7afd4fd553f364cf3c753f91ba689f8d",
"Type": "certificate",
"Vendor": "Test",
"Score": 0
}]
},
'IndicatorTimeline': [],
'Relationships': [],
'IgnoreAutoExtract': False,
'Note': False
}
def test_email_indicator_type(self, mocker):
"""
Given:
- a single email indicator entry
When
- creating an Common.EMAIL object
Then
- The context created matches the data entry
"""
from CommonServerPython import Common, DBotScoreType
mocker.patch.object(demisto, 'params', return_value={'insecure': True})
dbot_score = Common.DBotScore(
indicator='user@example.com',
integration_name='Test',
indicator_type=DBotScoreType.EMAIL,
score=Common.DBotScore.GOOD
)
dbot_context = {'DBotScore(val.Indicator && val.Indicator == obj.Indicator && '
'val.Vendor == obj.Vendor && val.Type == obj.Type)':
{'Indicator': 'user@example.com', 'Type': 'email', 'Vendor': 'Test', 'Score': 1}}
assert dbot_context == dbot_score.to_context()
email_context = Common.EMAIL(
domain='example.com',
address='user@example.com',
dbot_score=dbot_score
)
assert email_context.to_context()[email_context.CONTEXT_PATH] == {'Address': 'user@example.com',
'Domain': 'example.com'}
class TestIndicatorsSearcher:
def mock_search_after_output(self, fromDate='', toDate='', query='', size=0, value='', page=0, searchAfter='',
populateFields=None):
if not searchAfter:
searchAfter = 0
iocs = [{'value': 'mock{}'.format(searchAfter)}]
if searchAfter < 6:
searchAfter += 1
else:
# mock the end of indicators
searchAfter = None
if page and page >= 17:
# special case: a specific page beyond the available indicators was requested, so return no results
iocs = []
searchAfter = None
return {'searchAfter': searchAfter, 'iocs': iocs, 'total': 7}
def mock_search_indicators_search_after(self, fromDate='', toDate='', query='', size=0, value='', page=0,
searchAfter=None, populateFields=None):
"""
Mocks search indicators returning different results for searchAfter value:
- None: {searchAfter: 0, iocs: [...]}
- 0-2: {searchAfter: i+1, iocs: [...]}
- 3+: {searchAfter: None, iocs: []}
total of 4 iocs available
"""
search_after_options = (0, 1, 2)
if searchAfter is None:
search_after_value = search_after_options[0]
else:
if searchAfter in search_after_options:
search_after_value = searchAfter + 1
else:
return {'searchAfter': None, 'iocs': []}
iocs = [{'value': 'mock{}'.format(search_after_value)}]
return {'searchAfter': search_after_value, 'iocs': iocs, 'total': 4}
def test_search_indicators_by_page(self, mocker):
"""
Given:
- Searching indicators a couple of times
- Server version is less than 6.1.0
When:
- Mocking search indicators using paging
Then:
- The page number is rising
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_after_output)
search_indicators_obj_paging = IndicatorsSearcher()
search_indicators_obj_paging._can_use_search_after = False
for n in range(5):
search_indicators_obj_paging.search_indicators_by_version()
assert search_indicators_obj_paging._page == 5
def test_search_indicators_by_search_after(self, mocker):
"""
Given:
- Searching indicators a couple of times
- Server version is equal to or higher than 6.1.0
When:
- Mocking search indicators using the searchAfter parameter
Then:
- The search after param is rising
- The page param is rising
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_after_output)
search_indicators_obj_search_after = IndicatorsSearcher()
search_indicators_obj_search_after._can_use_search_after = True
try:
for n in range(5):
search_indicators_obj_search_after.search_indicators_by_version()
except Exception as e:
print(e)
assert search_indicators_obj_search_after._search_after_param == 5
assert search_indicators_obj_search_after._page == 5
def test_search_all_indicators_by_search_after(self, mocker):
"""
Given:
- Searching indicators a couple of times
- Server version is equal to or higher than 6.1.0
When:
- Mocking search indicators using the searchAfter parameter until there are no more indicators
so search_after is None
Then:
- The search after param is None
- The page param is rising
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_after_output)
search_indicators_obj_search_after = IndicatorsSearcher()
search_indicators_obj_search_after._can_use_search_after = True
for n in range(7):
search_indicators_obj_search_after.search_indicators_by_version()
assert search_indicators_obj_search_after._search_after_param is None
assert search_indicators_obj_search_after._page == 7
def test_search_indicators_in_certain_page(self, mocker):
"""
Given:
- Searching indicators in a specific page that is not 0
- Server version is less than 6.1.0
When:
- Mocking search indicators for this specific page
Then:
- The search after param is None
- The page param advances to 18
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_after_output)
search_indicators_obj_search_after = IndicatorsSearcher(page=17)
search_indicators_obj_search_after._can_use_search_after = False
search_indicators_obj_search_after.search_indicators_by_version()
assert search_indicators_obj_search_after._search_after_param is None
assert search_indicators_obj_search_after._page == 18
def test_iterator__pages(self, mocker):
"""
Given:
- Searching indicators from page 1
- Total available indicators == 6
When:
- Searching indicators using iterator
Then:
- Get 6 indicators
- Advance page to 7
- is_search_done returns True
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_after_output)
search_indicators = IndicatorsSearcher(page=1, size=1)
search_indicators._can_use_search_after = False
results = []
for res in search_indicators:
results.append(res)
assert len(results) == 6
assert search_indicators.page == 7
assert search_indicators.is_search_done() is True
def test_iterator__search_after(self, mocker):
"""
Given:
- Searching indicators from first page
- Total available indicators == 4
- Limit is set to 10
When:
- Searching indicators using iterator
- search_after is supported
Then:
- Get 4 indicators
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_indicators_search_after)
search_indicators = IndicatorsSearcher(limit=10)
search_indicators._can_use_search_after = True
results = []
for res in search_indicators:
results.append(res)
assert len(results) == 4
def test_iterator__empty_page(self, mocker):
"""
Given:
- Searching indicators from page 18
- Total available indicators from page 10-16 == 7
- No available indicators from page 17
When:
- Searching indicators using iterator (search_after is not supported)
Then:
- Get 0 indicators
- page advances by one to 19
"""
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_after_output)
search_indicators = IndicatorsSearcher(page=18)
results = []
for res in search_indicators:
results.append(res)
assert len(results) == 0
assert search_indicators.page == 19
def test_iterator__research_flow(self, mocker):
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(demisto, 'searchIndicators', side_effect=self.mock_search_indicators_search_after)
# fetch first 3
search_indicators = IndicatorsSearcher(limit=3)
search_indicators._can_use_search_after = True
results = []
for res in search_indicators:
results.append(res)
assert len(results) == 3
# fetch 1 more (limit set to 2, but only 1 available)
search_indicators.limit += 2
results = []
for res in search_indicators:
results.append(res)
assert len(results) == 1
class TestAutoFocusKeyRetriever:
def test_instantiate_class_with_param_key(self, mocker, clear_version_cache):
"""
Given:
- giving the api_key parameter
When:
- Mocking getAutoFocusApiKey
- Mocking server version to be 6.2.0
Then:
- The Auto Focus API Key is the one given to the class
"""
from CommonServerPython import AutoFocusKeyRetriever
mocker.patch.object(demisto, 'getAutoFocusApiKey', return_value='test')
mocker.patch.object(demisto, 'demistoVersion', return_value={'version': '6.2.0', 'buildNumber': '62000'})
auto_focus_key_retriever = AutoFocusKeyRetriever(api_key='1234')
assert auto_focus_key_retriever.key == '1234'
def test_instantiate_class_pre_6_2_failed(self, mocker, clear_version_cache):
"""
Given:
- not giving the api_key parameter
When:
- Mocking getAutoFocusApiKey
- Mocking server version to be 6.1.0
Then:
- Validate that an exception with an appropriate error message is raised.
"""
from CommonServerPython import AutoFocusKeyRetriever
mocker.patch.object(demisto, 'getAutoFocusApiKey', return_value='test')
mocker.patch.object(demisto, 'demistoVersion', return_value={'version': '6.1.0', 'buildNumber': '61000'})
with raises(DemistoException, match='For versions earlier than 6.2.0, configure an API Key.'):
AutoFocusKeyRetriever(api_key='')
def test_instantiate_class_without_param_key(self, mocker, clear_version_cache):
"""
Given:
- not giving the api_key parameter
When:
- Mocking getAutoFocusApiKey
- Mocking server version to be 6.2.0
Then:
- The Auto Focus API Key is the one given by the getAutoFocusApiKey method
"""
from CommonServerPython import AutoFocusKeyRetriever
mocker.patch.object(demisto, 'getAutoFocusApiKey', return_value='test')
mocker.patch.object(demisto, 'demistoVersion', return_value={'version': '6.2.0', 'buildNumber': '62000'})
auto_focus_key_retriever = AutoFocusKeyRetriever(api_key='')
assert auto_focus_key_retriever.key == 'test'
class TestEntityRelationship:
"""Global vars for all of the tests"""
name = 'related-to'
reverse_name = 'related-to'
relationship_type = 'IndicatorToIndicator'
entity_a = 'test1'
entity_a_family = 'Indicator'
entity_a_type = 'Domain'
entity_b = 'test2'
entity_b_family = 'Indicator'
entity_b_type = 'Domain'
source_reliability = 'F - Reliability cannot be judged'
def test_entity_relations_context(self):
"""
Given
- an EntityRelationship object.
When
- running to_context function of the object
Then
- Validate that the expected context is created
"""
from CommonServerPython import EntityRelationship
relationship = EntityRelationship(name='related-to',
relationship_type='IndicatorToIndicator',
entity_a='test1',
entity_a_family='Indicator',
entity_a_type='Domain',
entity_b='test2',
entity_b_family='Indicator',
entity_b_type='Domain',
source_reliability='F - Reliability cannot be judged',
brand='test')
expected_context = {
"Relationship": 'related-to',
"EntityA": 'test1',
"EntityAType": 'Domain',
"EntityB": 'test2',
"EntityBType": 'Domain',
}
assert relationship.to_context() == expected_context
def test_entity_relations_to_entry(self):
"""
Given
- an EntityRelationship object.
When
- running to_entry function of the object
Then
- Validate that the expected context is created
"""
from CommonServerPython import EntityRelationship
relationship = EntityRelationship(name=TestEntityRelationship.name,
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type=TestEntityRelationship.entity_b_type,
source_reliability=TestEntityRelationship.source_reliability
)
expected_entry = {
"name": TestEntityRelationship.name,
"reverseName": TestEntityRelationship.reverse_name,
"type": TestEntityRelationship.relationship_type,
"entityA": TestEntityRelationship.entity_a,
"entityAFamily": TestEntityRelationship.entity_a_family,
"entityAType": TestEntityRelationship.entity_a_type,
"entityB": TestEntityRelationship.entity_b,
"entityBFamily": TestEntityRelationship.entity_b_family,
"entityBType": TestEntityRelationship.entity_b_type,
"fields": {},
"reliability": TestEntityRelationship.source_reliability
}
assert relationship.to_entry() == expected_entry
def test_entity_relations_to_indicator(self):
"""
Given
- an EntityRelationship object.
When
- running to_indicator function of the object
Then
- Validate that the expected context is created
"""
from CommonServerPython import EntityRelationship
relationship = EntityRelationship(name=TestEntityRelationship.name,
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type=TestEntityRelationship.entity_b_type,
)
expected_to_indicator = {
"name": TestEntityRelationship.name,
"reverseName": TestEntityRelationship.reverse_name,
"type": TestEntityRelationship.relationship_type,
"entityA": TestEntityRelationship.entity_a,
"entityAFamily": TestEntityRelationship.entity_a_family,
"entityAType": TestEntityRelationship.entity_a_type,
"entityB": TestEntityRelationship.entity_b,
"entityBFamily": TestEntityRelationship.entity_b_family,
"entityBType": TestEntityRelationship.entity_b_type,
"fields": {},
}
assert relationship.to_indicator() == expected_to_indicator
def test_invalid_name_init(self):
"""
Given
- an EntityRelationship object which has an invalid relationship name.
When
- Creating the EntityRelationship object.
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import EntityRelationship
try:
EntityRelationship(name='ilegal',
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type=TestEntityRelationship.entity_b_type
)
except ValueError as exception:
assert "Invalid relationship: ilegal" in str(exception)
def test_invalid_relation_type_init(self):
"""
Given
- an EntityRelationship object which has an invalid relationship type.
When
- Creating the EntityRelationship object.
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import EntityRelationship
try:
EntityRelationship(name=TestEntityRelationship.name,
relationship_type='TestRelationshipType',
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type=TestEntityRelationship.entity_b_type
)
except ValueError as exception:
assert "Invalid relationship type: TestRelationshipType" in str(exception)
def test_invalid_a_family_init(self):
"""
Given
- an EntityRelationship object which has an invalid family type for the source entity.
When
- Creating the EntityRelationship object.
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import EntityRelationship
try:
EntityRelationship(name=TestEntityRelationship.name,
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family='IndicatorIlegal',
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type=TestEntityRelationship.entity_b_type
)
except ValueError as exception:
assert "Invalid entity A Family type: IndicatorIlegal" in str(exception)
def test_invalid_a_type_init(self):
"""
Given
- an EntityRelationship object which has an invalid type for the source entity.
When
- Creating the EntityRelationship object.
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import EntityRelationship
try:
EntityRelationship(name=TestEntityRelationship.name,
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type='DomainTest',
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type=TestEntityRelationship.entity_b_type
)
except ValueError as exception:
assert "Invalid entity A type: DomainTest" in str(exception)
def test_invalid_b_family_init(self):
"""
Given
- an EntityRelationship object which has an invalid family type for the destination entity.
When
- Creating the EntityRelationship object.
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import EntityRelationship
try:
EntityRelationship(name=TestEntityRelationship.name,
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family='IndicatorIlegal',
entity_b_type=TestEntityRelationship.entity_b_type
)
except ValueError as exception:
assert "Invalid entity B Family type: IndicatorIlegal" in str(exception)
def test_invalid_b_type_init(self):
"""
Given
- an EntityRelationship object which has an invalid type for the destination entity.
When
- Creating the EntityRelationship object.
Then
- Validate a ValueError is raised.
"""
from CommonServerPython import EntityRelationship
try:
EntityRelationship(name=TestEntityRelationship.name,
relationship_type=TestEntityRelationship.relationship_type,
entity_a=TestEntityRelationship.entity_a,
entity_a_family=TestEntityRelationship.entity_a_family,
entity_a_type=TestEntityRelationship.entity_a_type,
entity_b=TestEntityRelationship.entity_b,
entity_b_family=TestEntityRelationship.entity_b_family,
entity_b_type='DomainTest'
)
except ValueError as exception:
assert "Invalid entity B type: DomainTest" in str(exception)
class TestSetAndGetLastRun:
def test_get_last_run_in_6_2_when_get_last_run_has_results(self, mocker):
"""
Given: 6.2.0 environment and getLastRun returns results
When: Fetch indicators
Then: Returning all indicators from demisto.getLastRun object
"""
import demistomock as demisto
from CommonServerPython import get_feed_last_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mocker.patch.object(demisto, 'getLastRun', return_value={1: "first indicator"})
result = get_feed_last_run()
assert result == {1: "first indicator"}
def test_get_last_run_in_6_1_when_get_integration_context_has_results(self, mocker):
"""
Given: 6.1.0 environment and getIntegrationContext returns results
When: Fetch indicators
This can happen when updating XSOAR version to 6.2.0 while a feed instance is already set.
Then: Returning all indicators from demisto.getIntegrationContext object
"""
import demistomock as demisto
from CommonServerPython import get_feed_last_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.1.0"})
mocker.patch.object(demisto, 'getIntegrationContext', return_value={1: "first indicator"})
result = get_feed_last_run()
assert result == {1: "first indicator"}
def test_get_last_run_in_6_2_when_get_last_run_has_no_results(self, mocker):
"""
Given: 6.2.0 environment and getLastRun and getIntegrationContext are empty
When: Fetch indicators
Then: function will return empty dict
"""
import demistomock as demisto
from CommonServerPython import get_feed_last_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mocker.patch.object(demisto, 'getIntegrationContext', return_value={})
mocker.patch.object(demisto, 'getLastRun', return_value={})
result = get_feed_last_run()
assert result == {}
def test_get_last_run_in_6_2_when_get_last_is_empty_and_get_integration_is_not(self, mocker):
"""
Given: 6.2.0 environment and getLastRun is empty and getIntegrationContext has results.
When: Fetch indicators
Then: Return the indicators from getIntegrationContext, store them via setLastRun, and clear the integration context
"""
import demistomock as demisto
from CommonServerPython import get_feed_last_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
mocker.patch.object(demisto, 'getIntegrationContext', return_value={1: "first indicator"})
mocker.patch.object(demisto, 'getLastRun', return_value={})
set_last_run = mocker.patch.object(demisto, 'setLastRun', return_value={})
set_integration_context = mocker.patch.object(demisto, 'setIntegrationContext', return_value={})
result = get_feed_last_run()
assert result == {1: "first indicator"}
set_last_run.assert_called_with({1: "first indicator"})
set_integration_context.assert_called_with({})
def test_set_last_run_in_6_2(self, mocker):
"""
Given: 6.2.0 environment
When: Fetch indicators
Then: Using demisto.setLastRun to save results
"""
import demistomock as demisto
from CommonServerPython import set_feed_last_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.2.0"})
set_last_run = mocker.patch.object(demisto, 'setLastRun', return_value={})
set_integration_context = mocker.patch.object(demisto, 'setIntegrationContext', return_value={})
set_feed_last_run({1: "first indicator"})
assert set_integration_context.called is False
set_last_run.assert_called_with({1: "first indicator"})
def test_set_last_run_in_6_1(self, mocker):
"""
Given: 6.1.0 environment
When: Fetch indicators
Then: Using demisto.setIntegrationContext to save results
"""
import demistomock as demisto
from CommonServerPython import set_feed_last_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.1.0"})
set_last_run = mocker.patch.object(demisto, 'setLastRun', return_value={})
set_integration_context = mocker.patch.object(demisto, 'setIntegrationContext', return_value={})
set_feed_last_run({1: "first indicator"})
set_integration_context.assert_called_with({1: "first indicator"})
assert set_last_run.called is False
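# A minimal fetch-indicators sketch built only from the helpers exercised above. It is
# illustrative, not part of CommonServerPython, and client.build_iterator is a hypothetical
# feed-client method used purely to show where the helpers fit:
#
#     from CommonServerPython import get_feed_last_run, set_feed_last_run
#
#     def fetch_indicators(client):
#         last_run = get_feed_last_run()          # {} on the very first run
#         indicators, new_run = client.build_iterator(last_run)   # hypothetical client call
#         set_feed_last_run(new_run)              # setLastRun on 6.2.0+, integration context below that
#         return indicators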
class TestIsDemistoServerGE:
@classmethod
@pytest.fixture(scope='function', autouse=True)
def clear_cache(cls):
get_demisto_version._version = None
def test_get_demisto_version(self, mocker):
# verify expected server version and build returned in case Demisto class has attribute demistoVersion
mocker.patch.object(
demisto,
'demistoVersion',
return_value={
'version': '5.0.0',
'buildNumber': '50000'
}
)
assert get_demisto_version() == {
'version': '5.0.0',
'buildNumber': '50000'
}
# call again to check cache
assert get_demisto_version() == {
'version': '5.0.0',
'buildNumber': '50000'
}
# call count should be 1 as we cached
assert demisto.demistoVersion.call_count == 1
# test is_demisto_version_ge
assert is_demisto_version_ge('5.0.0')
assert is_demisto_version_ge('4.5.0')
assert not is_demisto_version_ge('5.5.0')
assert get_demisto_version_as_str() == '5.0.0-50000'
def test_is_demisto_version_ge_4_5(self, mocker):
get_version_patch = mocker.patch('CommonServerPython.get_demisto_version')
get_version_patch.side_effect = AttributeError('simulate missing demistoVersion')
assert not is_demisto_version_ge('5.0.0')
assert not is_demisto_version_ge('6.0.0')
with raises(AttributeError, match='simulate missing demistoVersion'):
is_demisto_version_ge('4.5.0')
def test_is_demisto_version_ge_dev_version(self, mocker):
mocker.patch.object(
demisto,
'demistoVersion',
return_value={
'version': '6.0.0',
'buildNumber': '50000'
}
)
assert is_demisto_version_ge('6.0.0', '1-dev')
@pytest.mark.parametrize('version, build', [
('6.0.0', '49999'),
('6.0.0', '50000'),
('6.0.0', '6'), # Added with the fix of https://github.com/demisto/etc/issues/36876
('5.5.0', '50001')
])
def test_is_demisto_version_build_ge(self, mocker, version, build):
mocker.patch.object(
demisto,
'demistoVersion',
return_value={
'version': '6.0.0',
'buildNumber': '50000'
}
)
assert is_demisto_version_ge(version, build)
@pytest.mark.parametrize('version, build', [
('6.0.0', '50001'),
('6.1.0', '49999')
])
def test_is_demisto_version_build_ge_negative(self, mocker, version, build):
mocker.patch.object(
demisto,
'demistoVersion',
return_value={
'version': '6.0.0',
'buildNumber': '50000'
}
)
assert not is_demisto_version_ge(version, build)
def test_smart_get_dict():
d = {'t1': None, "t2": 1}
# a plain dict returns the stored None instead of the default, which is unexpected by a lot of users
assert d.get('t1', 2) is None
s = SmartGetDict(d)
assert s.get('t1', 2) == 2
assert s.get('t2') == 1
assert s.get('t3') is None
class TestCustomIndicator:
def test_custom_indicator_init_success(self):
"""
Given: Data needed for creating a custom indicator
When: Data is valid
Then: Create a valid custom indicator
"""
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
'test',
DBotScoreType.CUSTOM,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
indicator = Common.CustomIndicator('test', 'test_value', dbot_score, {'param': 'value'}, 'prefix')
assert indicator.CONTEXT_PATH == 'prefix(val.value && val.value == obj.value)'
assert indicator.param == 'value'
assert indicator.value == 'test_value'
def test_custom_indicator_init_existing_type(self):
"""
Given: Data needed for creating a custom indicator
When: Type already exists
Then: raise a Value Error
"""
with pytest.raises(ValueError):
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
'test',
DBotScoreType.CUSTOM,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
Common.CustomIndicator('ip', 'test_value', dbot_score, {'param': 'value'}, 'prefix')
def test_custom_indicator_init_no_prefix(self):
"""
Given: Data needed for Custom indicator
When: Prefix provided is None
Then: Raise ValueError
"""
with pytest.raises(ValueError):
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
'test',
DBotScoreType.CUSTOM,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
Common.CustomIndicator('test', 'test_value', dbot_score, {'param': 'value'}, None)
def test_custom_indicator_init_no_dbot_score(self):
"""
Given: Data needed for Custom indicator
When: Dbotscore is not a DBotScore object
Then: Raise ValueError
"""
with pytest.raises(ValueError):
from CommonServerPython import Common
dbot_score = ''
Common.CustomIndicator('test', 'test_value', dbot_score, {'param': 'value'}, 'prefix')
def test_custom_indicator_to_context(self):
"""
Given: Data needed for Custom indicator
When: there's a call to to_context
Then: create a valid context
"""
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
'test',
DBotScoreType.CUSTOM,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
indicator = Common.CustomIndicator('test', 'test_value', dbot_score, {'param': 'value'}, 'prefix')
context = indicator.to_context()
assert context['DBotScore(val.Indicator &&'
' val.Indicator == obj.Indicator &&'
' val.Vendor == obj.Vendor && val.Type == obj.Type)']['Indicator'] == 'test'
assert context['prefix(val.value && val.value == obj.value)']['value'] == 'test_value'
assert context['prefix(val.value && val.value == obj.value)']['param'] == 'value'
def test_custom_indicator_no_params(self):
"""
Given: Data needed for creating a custom indicator
When: params are None
Then: Raise an error
"""
with pytest.raises(TypeError):
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
'test',
DBotScoreType.CUSTOM,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
Common.CustomIndicator('test', 'test_value', dbot_score, None, 'prefix')
def test_custom_indicator_no_value(self):
"""
Given: Data needed for creating a custom indicator
When: value is None
Then: Raise an error
"""
with pytest.raises(ValueError):
from CommonServerPython import Common, DBotScoreType
dbot_score = Common.DBotScore(
'test',
DBotScoreType.CUSTOM,
'VirusTotal',
score=Common.DBotScore.BAD,
malicious_description='malicious!'
)
Common.CustomIndicator('test', None, dbot_score, {'param': 'value'}, 'prefix')
@pytest.mark.parametrize(
"demistoUrls,expected_result",
[({'server': 'https://localhost:8443:/acc_test_tenant'}, 'acc_test_tenant'),
({'server': 'https://localhost:8443'}, '')])
def test_get_tenant_name(mocker, demistoUrls, expected_result):
"""
Given
- demistoUrls dictionary
When
- Running on multi tenant mode
- Running on single tenant mode
Then
- Return the tenant account name when in multi tenant mode, otherwise an empty string
"""
from CommonServerPython import get_tenant_account_name
mocker.patch.object(demisto, 'demistoUrls', return_value=demistoUrls)
result = get_tenant_account_name()
assert result == expected_result
IOCS = {'iocs': [{'id': '2323', 'value': 'google.com'},
{'id': '5942', 'value': '1.1.1.1'}]}
def test_indicators_value_to_clickable(mocker):
from CommonServerPython import indicators_value_to_clickable
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(IndicatorsSearcher, '__next__', side_effect=[IOCS, StopIteration])
result = indicators_value_to_clickable(['1.1.1.1', 'google.com'])
assert result.get('1.1.1.1') == '[1.1.1.1](#/indicator/5942)'
assert result.get('google.com') == '[google.com](#/indicator/2323)'
def test_indicators_value_to_clickable_invalid(mocker):
from CommonServerPython import indicators_value_to_clickable
from CommonServerPython import IndicatorsSearcher
mocker.patch.object(IndicatorsSearcher, '__next__', side_effect=[StopIteration])
result = indicators_value_to_clickable(['8.8.8.8', 'abc.com'])
assert not result
result = indicators_value_to_clickable(None)
assert not result
def test_arg_to_number():
"""
Test if arg_to_number handles unicode object without failing.
"""
from CommonServerPython import arg_to_number
result = arg_to_number(u'1')
assert result == 1
def test_get_message_threads_dump():
from CommonServerPython import get_message_threads_dump
result = str(get_message_threads_dump(None, None))
assert ' Start Threads Dump ' in result
assert ' End Threads Dump ' in result
assert 'CommonServerPython.py' in result
assert 'get_message_threads_dump' in result
def test_get_message_memory_dump():
from CommonServerPython import get_message_memory_dump
result = str(get_message_memory_dump(None, None))
assert ' Start Variables Dump ' in result
assert ' Start Local Vars ' in result
assert ' End Local Vars ' in result
assert ' Start Top ' in result
assert ' Globals by Size ' in result
assert ' End Top ' in result
assert ' End Variables Dump ' in result
def test_shorten_string_for_printing():
from CommonServerPython import shorten_string_for_printing
assert shorten_string_for_printing(None, None) is None
assert shorten_string_for_printing('1', 9) == '1'
assert shorten_string_for_printing('123456789', 9) == '123456789'
assert shorten_string_for_printing('1234567890', 9) == '123...890'
assert shorten_string_for_printing('12345678901', 9) == '123...901'
assert shorten_string_for_printing('123456789012', 9) == '123...012'
assert shorten_string_for_printing('1234567890', 10) == '1234567890'
assert shorten_string_for_printing('12345678901', 10) == '1234...901'
assert shorten_string_for_printing('123456789012', 10) == '1234...012'
def test_get_size_of_object():
from CommonServerPython import get_size_of_object
class Object(object):
pass
level_3 = Object()
level_3.key3 = 'val3'
level_2 = Object()
level_2.key2 = 'val2'
level_2.child = level_3
level_1 = Object()
level_1.key1 = 'val1'
level_1.child = level_2
level_1_sys_size = sys.getsizeof(level_1)
level_1_deep_size = get_size_of_object(level_1)
# 3 levels of nested objects, so the deep size should be at least 3 times as large
assert level_1_deep_size > 3 * level_1_sys_size
class TestSetAndGetLastMirrorRun:
def test_get_last_mirror_run_in_6_6(self, mocker):
"""
Given: 6.6.0 environment and getLastMirrorRun returns results
When: Execute mirroring run
Then: Return the demisto.getLastMirrorRun object
"""
import demistomock as demisto
from CommonServerPython import get_last_mirror_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.6.0"})
mocker.patch.object(demisto, 'getLastMirrorRun', return_value={"lastMirrorRun": "2018-10-24T14:13:20+00:00"})
result = get_last_mirror_run()
assert result == {"lastMirrorRun": "2018-10-24T14:13:20+00:00"}
def test_get_last_mirror_run_in_6_6_when_return_empty_results(self, mocker):
"""
Given: 6.6.0 environment and getLastMirrorRun returns empty results
When: Execute mirroring run
Then: Return the empty demisto.getLastMirrorRun object
"""
import demistomock as demisto
from CommonServerPython import get_last_mirror_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.6.0"})
mocker.patch.object(demisto, 'getLastMirrorRun', return_value={})
result = get_last_mirror_run()
assert result == {}
def test_get_last_run_in_6_5(self, mocker):
"""
Given: 6.5.0 environment and getLastMirrorRun returns results
When: Execute mirroring run
Then: A DemistoException is raised because the function is not supported below 6.6.0
"""
import demistomock as demisto
from CommonServerPython import get_last_mirror_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.5.0"})
get_last_run = mocker.patch.object(demisto, 'getLastMirrorRun')
with raises(DemistoException, match='You cannot use getLastMirrorRun as your version is below 6.6.0'):
get_last_mirror_run()
assert get_last_run.called is False
def test_set_mirror_last_run_in_6_6(self, mocker):
"""
Given: 6.6.0 environment
When: Execute mirroring run
Then: Using demisto.setLastMirrorRun to save results
"""
import demistomock as demisto
from CommonServerPython import set_last_mirror_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.6.0"})
set_last_run = mocker.patch.object(demisto, 'setLastMirrorRun', return_value={})
set_last_mirror_run({"lastMirrorRun": "2018-10-24T14:13:20+00:00"})
set_last_run.assert_called_with({"lastMirrorRun": "2018-10-24T14:13:20+00:00"})
def test_set_mirror_last_run_in_6_5(self, mocker):
"""
Given: 6.5.0 environment
When: Execute mirroring run
Then: A DemistoException is raised and demisto.setLastMirrorRun is not called
"""
import demistomock as demisto
from CommonServerPython import set_last_mirror_run
mocker.patch('CommonServerPython.get_demisto_version', return_value={"version": "6.5.0"})
set_last_run = mocker.patch.object(demisto, 'setLastMirrorRun', return_value={})
with raises(DemistoException, match='You cannot use setLastMirrorRun as your version is below 6.6.0'):
set_last_mirror_run({"lastMirrorRun": "2018-10-24T14:13:20+00:00"})
assert set_last_run.called is False
class TestTracebackLineNumberAdjustment:
@staticmethod
def test_module_line_number_mapping():
from CommonServerPython import _MODULES_LINE_MAPPING
assert _MODULES_LINE_MAPPING['CommonServerPython']['start'] == 0
@staticmethod
def test_register_module_line_sanity():
"""
Given:
A module with a start and an end boundary.
When:
registering a module.
Then:
* module exists in the mapping with valid boundaries.
"""
import CommonServerPython
CommonServerPython.register_module_line('Sanity', 'start', 5)
CommonServerPython.register_module_line('Sanity', 'end', 50)
assert CommonServerPython._MODULES_LINE_MAPPING['Sanity'] == {
'start': 5,
'start_wrapper': 5,
'end': 50,
'end_wrapper': 50,
}
@staticmethod
def test_register_module_line_single_boundry():
"""
Given:
* A module with only an end boundary.
* A module with only a start boundary.
When:
registering a module.
Then:
* both modules exist in the mapping.
* the missing boundary is 0 for start and infinity for end.
"""
import CommonServerPython
CommonServerPython.register_module_line('NoStart', 'end', 4)
CommonServerPython.register_module_line('NoEnd', 'start', 100)
assert CommonServerPython._MODULES_LINE_MAPPING['NoStart'] == {
'start': 0,
'start_wrapper': 0,
'end': 4,
'end_wrapper': 4,
}
assert CommonServerPython._MODULES_LINE_MAPPING['NoEnd'] == {
'start': 100,
'start_wrapper': 100,
'end': float('inf'),
'end_wrapper': float('inf'),
}
@staticmethod
def test_register_module_line_invalid_inputs():
"""
Given:
* invalid start_end flag.
* invalid line number.
When:
registering a module.
Then:
function exits quietly
"""
import CommonServerPython
CommonServerPython.register_module_line('Cactus', 'statr', 5)
CommonServerPython.register_module_line('Cactus', 'start', '5')
CommonServerPython.register_module_line('Cactus', 'statr', -5)
CommonServerPython.register_module_line('Cactus', 'statr', 0, -1)
@staticmethod
def test_fix_traceback_line_numbers():
import CommonServerPython
CommonServerPython._MODULES_LINE_MAPPING = {
'CommonServerPython': {'start': 200, 'end': 865, 'end_wrapper': 900},
'TestTracebackLines': {'start': 901, 'end': float('inf'), 'start_wrapper': 901},
'TestingApiModule': {'start': 1004, 'end': 1032, 'start_wrapper': 1001, 'end_wrapper': 1033},
}
traceback = '''Traceback (most recent call last):
File "<string>", line 1043, in <module>
File "<string>", line 986, in main
File "<string>", line 600, in func_wrapper
File "<string>", line 1031, in api_module_call_script
File "<string>", line 927, in call_func
Exception: WTF?!!!'''
expected_traceback = '''Traceback (most recent call last):
File "<TestTracebackLines>", line 110, in <module>
File "<TestTracebackLines>", line 85, in main
File "<CommonServerPython>", line 400, in func_wrapper
File "<TestingApiModule>", line 27, in api_module_call_script
File "<TestTracebackLines>", line 26, in call_func
Exception: WTF?!!!'''
result = CommonServerPython.fix_traceback_line_numbers(traceback)
assert result == expected_traceback
| 37.732239
| 1,067
| 0.581539
|
3451b8d047429de7c3c8edf4053ac9dfed1df361
| 3,079
|
py
|
Python
|
demos/streaming_asr_server/websocket_client.py
|
jerryuhoo/PaddleSpeech
|
1eec7b5e042da294c7524af92f0fae4c32a71aa3
|
[
"Apache-2.0"
] | 1,379
|
2021-11-10T02:42:21.000Z
|
2022-03-31T13:34:25.000Z
|
demos/streaming_asr_server/websocket_client.py
|
jerryuhoo/PaddleSpeech
|
1eec7b5e042da294c7524af92f0fae4c32a71aa3
|
[
"Apache-2.0"
] | 268
|
2021-11-10T14:07:34.000Z
|
2022-03-31T02:25:20.000Z
|
demos/streaming_asr_server/websocket_client.py
|
jerryuhoo/PaddleSpeech
|
1eec7b5e042da294c7524af92f0fae4c32a71aa3
|
[
"Apache-2.0"
] | 296
|
2021-11-15T02:37:11.000Z
|
2022-03-31T12:14:46.000Z
|
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/usr/bin/python
# -*- coding: UTF-8 -*-
import argparse
import asyncio
import codecs
import logging
import os
from paddlespeech.cli.log import logger
from paddlespeech.server.utils.audio_handler import ASRWsAudioHandler
def main(args):
logger.info("asr websocket client start")
handler = ASRWsAudioHandler(
args.server_ip,
args.port,
endpoint=args.endpoint,
punc_server_ip=args.punc_server_ip,
punc_server_port=args.punc_server_port)
loop = asyncio.get_event_loop()
# support to process single audio file
if args.wavfile and os.path.exists(args.wavfile):
logger.info(f"start to process the wavscp: {args.wavfile}")
result = loop.run_until_complete(handler.run(args.wavfile))
result = result["result"]
logger.info(f"asr websocket client finished : {result}")
# support to process batch audios from wav.scp
if args.wavscp and os.path.exists(args.wavscp):
logging.info(f"start to process the wavscp: {args.wavscp}")
with codecs.open(args.wavscp, 'r', encoding='utf-8') as f,\
codecs.open("result.txt", 'w', encoding='utf-8') as w:
for line in f:
utt_name, utt_path = line.strip().split()
result = loop.run_until_complete(handler.run(utt_path))
result = result["result"]
w.write(f"{utt_name} {result}\n")
if __name__ == "__main__":
logger.info("Start to do streaming asr client")
parser = argparse.ArgumentParser()
parser.add_argument(
'--server_ip', type=str, default='127.0.0.1', help='server ip')
parser.add_argument('--port', type=int, default=8090, help='server port')
parser.add_argument(
'--punc.server_ip',
type=str,
default=None,
dest="punc_server_ip",
help='Punctuation server ip')
parser.add_argument(
'--punc.port',
type=int,
default=8091,
dest="punc_server_port",
help='Punctuation server port')
parser.add_argument(
"--endpoint",
type=str,
default="/paddlespeech/asr/streaming",
help="ASR websocket endpoint")
parser.add_argument(
"--wavfile",
action="store",
help="wav file path ",
default="./16_audio.wav")
parser.add_argument(
"--wavscp", type=str, default=None, help="The batch audios dict text")
args = parser.parse_args()
main(args)
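# Example invocation (illustrative only; the server address and wav path are placeholders,
# the flags and defaults are the ones defined by the argparse options above):
#
#     python websocket_client.py --server_ip 127.0.0.1 --port 8090 \
#         --endpoint /paddlespeech/asr/streaming --wavfile ./16_audio.wav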
| 34.988636
| 78
| 0.656057
|
836d2f40f56d5bee0c4aeefa3ace84c42e3e09cf
| 333
|
py
|
Python
|
DAQmxConfigTest.py
|
franciszekjuras/PyDAQmx
|
8fd3b8f4cfdb1b8082e40b9128edd9963f0f6fc2
|
[
"BSD-3-Clause"
] | 103
|
2015-01-16T09:26:41.000Z
|
2022-01-20T16:57:17.000Z
|
DAQmxConfigTest.py
|
franciszekjuras/PyDAQmx
|
8fd3b8f4cfdb1b8082e40b9128edd9963f0f6fc2
|
[
"BSD-3-Clause"
] | 50
|
2015-01-15T12:52:54.000Z
|
2021-12-09T13:03:32.000Z
|
DAQmxConfigTest.py
|
franciszekjuras/PyDAQmx
|
8fd3b8f4cfdb1b8082e40b9128edd9963f0f6fc2
|
[
"BSD-3-Clause"
] | 50
|
2015-02-13T18:06:28.000Z
|
2022-03-19T15:02:44.000Z
|
import os
# To use non-default config parameters, or to use PyDAQmx without NIDAQmx installed:
# Example
# import DAQmxConfigTest
# DAQmxConfigTest.dot_h_file = "..." # optional modification
# import PyDAQmx
lib_name = None
directory = os.path.split(os.path.realpath(__file__))[0]
dot_h_file = os.path.join(directory, "NIDAQmx.h")
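# A slightly fuller sketch of the example above (how PyDAQmx consumes these module-level
# values at import time is assumed from the comment above, not verified here):
#
#     import DAQmxConfigTest
#     DAQmxConfigTest.lib_name = None                    # skip loading the NI shared library
#     DAQmxConfigTest.dot_h_file = "/path/to/NIDAQmx.h"  # point at a local copy of the header
#     import PyDAQmx                                     # PyDAQmx picks up the values above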
| 25.615385
| 83
| 0.765766
|
26b88785a1121602b1707cd5735228b728ca6ca7
| 642
|
py
|
Python
|
churchill/api/v1/user/urls.py
|
manti-by/traugott
|
6ae05a53c14b29a08daa02a8de1624671f8f063a
|
[
"BSD-3-Clause"
] | null | null | null |
churchill/api/v1/user/urls.py
|
manti-by/traugott
|
6ae05a53c14b29a08daa02a8de1624671f8f063a
|
[
"BSD-3-Clause"
] | 11
|
2021-01-11T20:52:04.000Z
|
2021-05-12T09:12:38.000Z
|
churchill/api/v1/user/urls.py
|
manti-by/churchill
|
6ae05a53c14b29a08daa02a8de1624671f8f063a
|
[
"BSD-3-Clause"
] | null | null | null |
from django.urls import path
from churchill.api.v1.user.views import (
RegisterView,
LoginView,
RegisterVerifyView,
ResetPasswordView,
ResetPasswordConfirmView,
)
app_name = "user"
urlpatterns = [
path("register/", RegisterView.as_view(), name="register"),
path("register-verify/", RegisterVerifyView.as_view(), name="register_verify"),
path("login/", LoginView.as_view(), name="login"),
path("reset-password/", ResetPasswordView.as_view(), name="reset_password"),
path(
"reset-password-confirm/",
ResetPasswordConfirmView.as_view(),
name="reset_password_confirm",
),
]
| 25.68
| 83
| 0.683801
|
2c89bf72325529fbc6a00f8be4cf12c9a1fd5c9d
| 645
|
py
|
Python
|
senior_project/personal/urls.py
|
eigenholser/jobsifit
|
ae5329864717acf27d657c10029791294abcd9b3
|
[
"MIT"
] | null | null | null |
senior_project/personal/urls.py
|
eigenholser/jobsifit
|
ae5329864717acf27d657c10029791294abcd9b3
|
[
"MIT"
] | null | null | null |
senior_project/personal/urls.py
|
eigenholser/jobsifit
|
ae5329864717acf27d657c10029791294abcd9b3
|
[
"MIT"
] | null | null | null |
from django.conf.urls import url, include
from . import views
from django.contrib import admin
urlpatterns = [
#url(r'^bfapp/', include('bfapp.urls')),
# url(r'^admin/', include(admin.site.urls)), #like its own app
url(r'^$', views.index, name='index'),
url(r'^contact/', views.contact, name='contact'),
url(r'^$', views.home, name='home'),
# url(r'^users/login/$', 'django.contrib.auth.views.login', {'template_name': 'login.html'}),
]
#url(r'^login/$', 'django.contrib.auth.views.login', {'template_name': 'personal/login.html'}), # matches /m/login
#url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'),
| 37.941176
| 116
| 0.657364
|
1d5090ed242062b81533352a041f512a9646124e
| 100
|
py
|
Python
|
enthought/mayavi/modules/vector_cut_plane.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/mayavi/modules/vector_cut_plane.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/mayavi/modules/vector_cut_plane.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from __future__ import absolute_import
from mayavi.modules.vector_cut_plane import *
| 25
| 45
| 0.85
|
ccee01ea0abe31c37c6e4f9cb2b8f055645f89df
| 8,496
|
py
|
Python
|
datasets/casino/casino.py
|
zidingz/datasets
|
02edd9ebc79f715adb1c718d1439fda83dc356f1
|
[
"Apache-2.0"
] | 10,608
|
2020-09-10T15:47:50.000Z
|
2022-03-31T22:51:47.000Z
|
datasets/casino/casino.py
|
zidingz/datasets
|
02edd9ebc79f715adb1c718d1439fda83dc356f1
|
[
"Apache-2.0"
] | 2,396
|
2020-09-10T14:55:31.000Z
|
2022-03-31T19:41:04.000Z
|
datasets/casino/casino.py
|
zidingz/datasets
|
02edd9ebc79f715adb1c718d1439fda83dc356f1
|
[
"Apache-2.0"
] | 1,530
|
2020-09-10T21:43:10.000Z
|
2022-03-31T01:59:12.000Z
|
# coding=utf-8
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Campsite Negotiation Dialogues"""
import json
import datasets
_CITATION = """\
@inproceedings{chawla2021casino,
title={CaSiNo: A Corpus of Campsite Negotiation Dialogues for Automatic Negotiation Systems},
author={Chawla, Kushal and Ramirez, Jaysa and Clever, Rene and Lucas, Gale and May, Jonathan and Gratch, Jonathan},
booktitle={Proceedings of the 2021 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies},
pages={3167--3185},
year={2021}
}
"""
_DESCRIPTION = """\
We provide a novel dataset (referred to as CaSiNo) of 1030 negotiation dialogues. Two participants take the role of campsite neighbors and negotiate for Food, Water, and Firewood packages, based on their individual preferences and requirements. This design keeps the task tractable, while still facilitating linguistically rich and personal conversations. This helps to overcome the limitations of prior negotiation datasets such as Deal or No Deal and Craigslist Bargain. Each dialogue consists of rich meta-data including participant demographics, personality, and their subjective evaluation of the negotiation in terms of satisfaction and opponent likeness.
"""
_HOMEPAGE = "https://github.com/kushalchawla/CaSiNo"
_LICENSE = "The project is licensed under CC-BY-4.0"
_URLs = {
"train": "https://raw.githubusercontent.com/kushalchawla/CaSiNo/main/data/casino.json",
}
class Casino(datasets.GeneratorBasedBuilder):
"""Campsite Negotiation Dialogues"""
VERSION = datasets.Version("1.1.0")
def _info(self):
features = datasets.Features(
{
"chat_logs": [
{
"text": datasets.Value("string"),
"task_data": {
"data": datasets.Value("string"),
"issue2youget": {
"Firewood": datasets.Value("string"),
"Water": datasets.Value("string"),
"Food": datasets.Value("string"),
},
"issue2theyget": {
"Firewood": datasets.Value("string"),
"Water": datasets.Value("string"),
"Food": datasets.Value("string"),
},
},
"id": datasets.Value("string"),
},
],
"participant_info": {
"mturk_agent_1": {
"value2issue": {
"Low": datasets.Value("string"),
"Medium": datasets.Value("string"),
"High": datasets.Value("string"),
},
"value2reason": {
"Low": datasets.Value("string"),
"Medium": datasets.Value("string"),
"High": datasets.Value("string"),
},
"outcomes": {
"points_scored": datasets.Value("int32"),
"satisfaction": datasets.Value("string"),
"opponent_likeness": datasets.Value("string"),
},
"demographics": {
"age": datasets.Value("int32"),
"gender": datasets.Value("string"),
"ethnicity": datasets.Value("string"),
"education": datasets.Value("string"),
},
"personality": {
"svo": datasets.Value("string"),
"big-five": {
"extraversion": datasets.Value("float"),
"agreeableness": datasets.Value("float"),
"conscientiousness": datasets.Value("float"),
"emotional-stability": datasets.Value("float"),
"openness-to-experiences": datasets.Value("float"),
},
},
},
"mturk_agent_2": {
"value2issue": {
"Low": datasets.Value("string"),
"Medium": datasets.Value("string"),
"High": datasets.Value("string"),
},
"value2reason": {
"Low": datasets.Value("string"),
"Medium": datasets.Value("string"),
"High": datasets.Value("string"),
},
"outcomes": {
"points_scored": datasets.Value("int32"),
"satisfaction": datasets.Value("string"),
"opponent_likeness": datasets.Value("string"),
},
"demographics": {
"age": datasets.Value("int32"),
"gender": datasets.Value("string"),
"ethnicity": datasets.Value("string"),
"education": datasets.Value("string"),
},
"personality": {
"svo": datasets.Value("string"),
"big-five": {
"extraversion": datasets.Value("float"),
"agreeableness": datasets.Value("float"),
"conscientiousness": datasets.Value("float"),
"emotional-stability": datasets.Value("float"),
"openness-to-experiences": datasets.Value("float"),
},
},
},
},
"annotations": [[datasets.Value("string")]],
}
)
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=features,
supervised_keys=None,
homepage=_HOMEPAGE,
license=_LICENSE,
citation=_CITATION,
)
def _split_generators(self, dl_manager):
"""Returns SplitGenerators."""
path = dl_manager.download_and_extract(_URLs["train"])
return [
datasets.SplitGenerator(
name=datasets.Split.TRAIN,
gen_kwargs={
"filepath": path,
"split": "train",
},
),
]
def _generate_examples(self, filepath, split="train"):
"""Yields examples."""
with open(filepath, encoding="utf-8") as f:
all_data = json.load(f)
for idx, item in enumerate(all_data):
for chat_item in item["chat_logs"]:
if "data" not in chat_item["task_data"]:
chat_item["task_data"]["data"] = ""
if "issue2youget" not in chat_item["task_data"]:
chat_item["task_data"]["issue2youget"] = {
"Food": "",
"Firewood": "",
"Water": "",
}
if "issue2theyget" not in chat_item["task_data"]:
chat_item["task_data"]["issue2theyget"] = {
"Food": "",
"Firewood": "",
"Water": "",
}
item.pop("dialogue_id")
yield idx, item
| 44.020725
| 661
| 0.471869
|
e6197efed047ea332a3d052fecc146b10bda2d11
| 12,265
|
py
|
Python
|
test/opsgenie_swagger/models/v_center_integration.py
|
Logicworks/opsgenie-python-sdk
|
244c4c40ddcc25e70df5ba4425ab8d7c8da59c18
|
[
"Apache-2.0"
] | null | null | null |
test/opsgenie_swagger/models/v_center_integration.py
|
Logicworks/opsgenie-python-sdk
|
244c4c40ddcc25e70df5ba4425ab8d7c8da59c18
|
[
"Apache-2.0"
] | null | null | null |
test/opsgenie_swagger/models/v_center_integration.py
|
Logicworks/opsgenie-python-sdk
|
244c4c40ddcc25e70df5ba4425ab8d7c8da59c18
|
[
"Apache-2.0"
] | 1
|
2020-11-07T11:27:13.000Z
|
2020-11-07T11:27:13.000Z
|
# coding: utf-8
"""
OpsGenie REST API
OpsGenie OpenAPI Specification # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from opsgenie_swagger.models.integration import Integration # noqa: F401,E501
from opsgenie_swagger.models.recipient import Recipient # noqa: F401,E501
from opsgenie_swagger.models.team_meta import TeamMeta # noqa: F401,E501
from opsgenie_swagger.models.token_based_incoming_feature import TokenBasedIncomingFeature # noqa: F401,E501
class VCenterIntegration(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'suppress_notifications': 'bool',
'ignore_teams_from_payload': 'bool',
'ignore_recipients_from_payload': 'bool',
'recipients': 'list[Recipient]',
'is_advanced': 'bool',
'feature_type': 'str',
'allow_configuration_access': 'bool',
'allow_write_access': 'bool'
}
attribute_map = {
'suppress_notifications': 'suppressNotifications',
'ignore_teams_from_payload': 'ignoreTeamsFromPayload',
'ignore_recipients_from_payload': 'ignoreRecipientsFromPayload',
'recipients': 'recipients',
'is_advanced': 'isAdvanced',
'feature_type': 'feature-type',
'allow_configuration_access': 'allowConfigurationAccess',
'allow_write_access': 'allowWriteAccess'
}
def __init__(self, suppress_notifications=None, ignore_teams_from_payload=None, ignore_recipients_from_payload=None, recipients=None, is_advanced=None, feature_type=None, allow_configuration_access=None, allow_write_access=None): # noqa: E501
"""VCenterIntegration - a model defined in Swagger""" # noqa: E501
self._suppress_notifications = None
self._ignore_teams_from_payload = None
self._ignore_recipients_from_payload = None
self._recipients = None
self._is_advanced = None
self._feature_type = None
self._allow_configuration_access = None
self._allow_write_access = None
self.discriminator = None
if suppress_notifications is not None:
self.suppress_notifications = suppress_notifications
if ignore_teams_from_payload is not None:
self.ignore_teams_from_payload = ignore_teams_from_payload
if ignore_recipients_from_payload is not None:
self.ignore_recipients_from_payload = ignore_recipients_from_payload
if recipients is not None:
self.recipients = recipients
if is_advanced is not None:
self.is_advanced = is_advanced
if feature_type is not None:
self.feature_type = feature_type
if allow_configuration_access is not None:
self.allow_configuration_access = allow_configuration_access
if allow_write_access is not None:
self.allow_write_access = allow_write_access
@property
def suppress_notifications(self):
"""Gets the suppress_notifications of this VCenterIntegration. # noqa: E501
If enabled, notifications that come from alerts will be suppressed. Defaults to false # noqa: E501
:return: The suppress_notifications of this VCenterIntegration. # noqa: E501
:rtype: bool
"""
return self._suppress_notifications
@suppress_notifications.setter
def suppress_notifications(self, suppress_notifications):
"""Sets the suppress_notifications of this VCenterIntegration.
If enabled, notifications that come from alerts will be suppressed. Defaults to false # noqa: E501
:param suppress_notifications: The suppress_notifications of this VCenterIntegration. # noqa: E501
:type: bool
"""
self._suppress_notifications = suppress_notifications
@property
def ignore_teams_from_payload(self):
"""Gets the ignore_teams_from_payload of this VCenterIntegration. # noqa: E501
If enabled, the integration will ignore teams sent in request payloads. Defaults to false # noqa: E501
:return: The ignore_teams_from_payload of this VCenterIntegration. # noqa: E501
:rtype: bool
"""
return self._ignore_teams_from_payload
@ignore_teams_from_payload.setter
def ignore_teams_from_payload(self, ignore_teams_from_payload):
"""Sets the ignore_teams_from_payload of this VCenterIntegration.
If enabled, the integration will ignore teams sent in request payloads. Defaults to false # noqa: E501
:param ignore_teams_from_payload: The ignore_teams_from_payload of this VCenterIntegration. # noqa: E501
:type: bool
"""
self._ignore_teams_from_payload = ignore_teams_from_payload
@property
def ignore_recipients_from_payload(self):
"""Gets the ignore_recipients_from_payload of this VCenterIntegration. # noqa: E501
If enabled, the integration will ignore recipients sent in request payloads. Defaults to false # noqa: E501
:return: The ignore_recipients_from_payload of this VCenterIntegration. # noqa: E501
:rtype: bool
"""
return self._ignore_recipients_from_payload
@ignore_recipients_from_payload.setter
def ignore_recipients_from_payload(self, ignore_recipients_from_payload):
"""Sets the ignore_recipients_from_payload of this VCenterIntegration.
If enabled, the integration will ignore recipients sent in request payloads. Defaults to false # noqa: E501
:param ignore_recipients_from_payload: The ignore_recipients_from_payload of this VCenterIntegration. # noqa: E501
:type: bool
"""
self._ignore_recipients_from_payload = ignore_recipients_from_payload
@property
def recipients(self):
"""Gets the recipients of this VCenterIntegration. # noqa: E501
Optional user, schedule, teams or escalation names to calculate which users will receive the notifications of the alert. Recipients which are exceeding the limit are ignored # noqa: E501
:return: The recipients of this VCenterIntegration. # noqa: E501
:rtype: list[Recipient]
"""
return self._recipients
@recipients.setter
def recipients(self, recipients):
"""Sets the recipients of this VCenterIntegration.
Optional user, schedule, teams or escalation names to calculate which users will receive the notifications of the alert. Recipients which are exceeding the limit are ignored # noqa: E501
:param recipients: The recipients of this VCenterIntegration. # noqa: E501
:type: list[Recipient]
"""
self._recipients = recipients
@property
def is_advanced(self):
"""Gets the is_advanced of this VCenterIntegration. # noqa: E501
:return: The is_advanced of this VCenterIntegration. # noqa: E501
:rtype: bool
"""
return self._is_advanced
@is_advanced.setter
def is_advanced(self, is_advanced):
"""Sets the is_advanced of this VCenterIntegration.
:param is_advanced: The is_advanced of this VCenterIntegration. # noqa: E501
:type: bool
"""
self._is_advanced = is_advanced
@property
def feature_type(self):
"""Gets the feature_type of this VCenterIntegration. # noqa: E501
:return: The feature_type of this VCenterIntegration. # noqa: E501
:rtype: str
"""
return self._feature_type
@feature_type.setter
def feature_type(self, feature_type):
"""Sets the feature_type of this VCenterIntegration.
:param feature_type: The feature_type of this VCenterIntegration. # noqa: E501
:type: str
"""
allowed_values = ["email-based", "token-based"] # noqa: E501
if feature_type not in allowed_values:
raise ValueError(
"Invalid value for `feature_type` ({0}), must be one of {1}" # noqa: E501
.format(feature_type, allowed_values)
)
self._feature_type = feature_type
@property
def allow_configuration_access(self):
"""Gets the allow_configuration_access of this VCenterIntegration. # noqa: E501
This parameter is for allowing or restricting the configuration access. If configuration access is restricted, the integration will be limited to Alert API requests and sending heartbeats. Defaults to false # noqa: E501
:return: The allow_configuration_access of this VCenterIntegration. # noqa: E501
:rtype: bool
"""
return self._allow_configuration_access
@allow_configuration_access.setter
def allow_configuration_access(self, allow_configuration_access):
"""Sets the allow_configuration_access of this VCenterIntegration.
This parameter is for allowing or restricting the configuration access. If configuration access is restricted, the integration will be limited to Alert API requests and sending heartbeats. Defaults to false # noqa: E501
:param allow_configuration_access: The allow_configuration_access of this VCenterIntegration. # noqa: E501
:type: bool
"""
self._allow_configuration_access = allow_configuration_access
@property
def allow_write_access(self):
"""Gets the allow_write_access of this VCenterIntegration. # noqa: E501
This parameter is for configuring the read-only access of integration. If the integration is limited to read-only access, the integration will not be authorized to perform any create, update or delete action within any domain. Defaults to true # noqa: E501
:return: The allow_write_access of this VCenterIntegration. # noqa: E501
:rtype: bool
"""
return self._allow_write_access
@allow_write_access.setter
def allow_write_access(self, allow_write_access):
"""Sets the allow_write_access of this VCenterIntegration.
This parameter is for configuring the read-only access of integration. If the integration is limited to read-only access, the integration will not be authorized to perform any create, update or delete action within any domain. Defaults to true # noqa: E501
:param allow_write_access: The allow_write_access of this VCenterIntegration. # noqa: E501
:type: bool
"""
self._allow_write_access = allow_write_access
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, VCenterIntegration):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
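A minimal usage sketch for this generated model, assuming the module is importable as opsgenie_swagger.models.v_center_integration; the field values below are illustrative, not taken from the source:
from opsgenie_swagger.models.v_center_integration import VCenterIntegration
# Build an integration payload and serialize it with the generated helpers.
integration = VCenterIntegration(
    suppress_notifications=False,
    feature_type="token-based",  # validated by the setter against the allowed values
    allow_write_access=True,
)
print(integration.to_dict())  # plain dict keyed by the Python attribute names
print(integration)            # pretty-printed via to_str()/__repr__()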
| 38.569182 | 265 | 0.679413 |
e7194b7aa8564b8244dc18043c28374e5ee06b7e | 14,983 | py | Python | AppendData.py | BansheeLW/Python | e90798750c92bf7656896f426b49bfd90c1407ea | ["MIT"] | null | null | null | AppendData.py | BansheeLW/Python | e90798750c92bf7656896f426b49bfd90c1407ea | ["MIT"] | null | null | null | AppendData.py | BansheeLW/Python | e90798750c92bf7656896f426b49bfd90c1407ea | ["MIT"] | null | null | null |
import requests
from bs4 import BeautifulSoup
import xlwt
from xlrd import open_workbook
from xlutils.copy import copy
# Set up an Excel cell style
def set_style(name, height, bold=False):
    style = xlwt.XFStyle() # initialize the style
    font = xlwt.Font() # create a font for the style
font.name = name # 'Times New Roman'
font.bold = bold
font.color_index = 4
font.height = height
style.font = font
return style
def getTableByPage(curPage,nextPage):
url = "http://www.szsti.gov.cn/services/hightech/default.aspx"
from_data = {
"PagerControl_input": str(curPage),
"__EVENTARGUMENT": str(nextPage),
"__EVENTTARGET": "PagerControl",
"__EVENTVALIDATION": "/wEdAAEAAAD/////AQAAAAAAAAAPAQAAAAYAAAAIR6EBhXahAYV1oQGFdKEBhRwU3f3MQ+d8CwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEJk3QAUPJr17ntVowFFRVa3ASeR3R0u0HpmlE0yngEF",
"__VIEWSTATE": "/wEMDAwQAgAADgEMBQAMEAIAAA4DDAUBDBACDAAMCBAEEAUAAQblhajpg6gBAAgIEAUAAQzlm73lrrbpq5jkvIEBATEJCBAFAAEP5rex5Zyz5biC6auY5LyBAQEyCQgQBQABDOmrmOmhueiupOWumgEBMwkIAAAMBQcMEAIPAQEFSXRlbXMFFAAOFAwFAQwQAgAADgYMBQEMEAIcGg1TeXN0ZW0uU3RyaW5nS21zY29ybGliLCBWZXJzaW9uPTQuMC4wLjAsIEN1bHR1cmU9bmV1dHJhbCwgUHVibGljS2V5VG9rZW49Yjc3YTVjNTYxOTM0ZTA4OQEBBTEzMDc0AAAMBQMMEAIcGwkAAQEKU1oyMDE2MTA2NgAADAUFDBACHBsJAAEBJOmRq+WFg+enkeaKgO+8iOa3seWcs++8ieaciemZkOWFrOWPuAAADAUHDBACHBsJAAEBLea3seWcs+W4guWNl+WxseWMuuenkeaKgOW3peS4muWbree7tOeUqOWkp+WOpgAADAUJDBACHBsJAAEBBui9r+S7tgAADAULDBACHBsJAAECBAAAAAwFAgwQAgAADgYMBQEMEAIcGwkAAQEFMTMwNzMAAAwFAwwQAhwbCQABAQlTWjIwMTYyMDUAAAwFBQwQAhwbCQABASrljY/lkoznjq/kv53np5HmioDvvIjmt7HlnLPvvInmnInpmZDlhazlj7gAAAwFBwwQAhwbCQABAVzmt7HlnLPluILpvpnljY7mlrDljLrlpKfmtarlip7kuovlpITlpKfmtarnpL7ljLrljY7lroHot6/ljY7ogZTlt6XkuJrlm60xMOWPt+WOguaIv+esrOS4gOWxggAADAUJDBACHBsJAAEBFeawtOaxoeafk+aOp+WItuaKgOacrwAADAULDBACHBsJAAECBAAAAAwFAwwQAgAADgYMBQEMEAIcGwkAAQEFMTMwNzIAAAwFAwwQAhwbCQABAQpTWjIwMTYxMTcxAAAMBQUMEAIcGwkAAQEq5pmT6IO95LqS6IGU56eR5oqA77yI5rex5Zyz77yJ5pyJ6ZmQ5YWs5Y+4AAAMBQcMEAIcGwkAAQFF5rex5Zyz5biC5a6d5a6J5Yy65paw5a6J6KGX6YGTNTDljLrljY7mtbfllYbliqHlpKfljqZC5qCL5Zub5qW8NDA25Y+3AAAMBQkMEAIcGwkAAQEb6K6h566X5py65Y+K5YW257uI56uv5oqA5pyvAAAMBQsMEAIcGwkAAQIEAAAADAUEDBACAAAOBgwFAQwQAhwbCQABAQUxMzA3MQAADAUDDBACHBsJAAEBCVNaMjAxNjY0MQAADAUFDBACHBsJAAEBKuS8n+iDveacuueUteiuvuWkh++8iOa3seWcs++8ieaciemZkOWFrOWPuAAADAUHDBACHBsJAAEBQOa3seWcs+W4guWuneWuieWMuuadvuWyl+ihl+mBk+eHleW3neWMl+mDqOW3peS4muWbrUI35qCL44CBQzbmoIsAAAwFCQwQAhwbCQABASfnlLXlipvns7vnu5/kv6Hmga/ljJbkuI7oh6rliqjljJbmioDmnK8AAAwFCwwQAhwbCQABAgQAAAAMBQUMEAIAAA4GDAUBDBACHBsJAAEBBTEzMDcwAAAMBQMMEAIcGwkAAQEJU1oyMDE2ODgxAAAMBQUMEAIcGwkAAQEt5Lyf5Yib6L6+55S15a2Q56eR5oqA77yI5rex5Zyz77yJ5pyJ6ZmQ5YWs5Y+4AAAMBQcMEAIcGwkAAQFF5rex5Zyz5biC5Y2X5bGx5Yy66auY5paw5Lit5Zub6YGTMzDlj7fpvpnms7DliKnlpKfljqblha3mpbw2MzAtNjMz5a6kAAAMBQkMEAIcGwkAAQEV572R57uc5Y+K6YCa5L+h5oqA5pyvAAAMBQsMEAIcGwkAAQIEAAAADAUGDBACAAAOBgwFAQwQAhwbCQABAQUxMzA2OQAADAUDDBACHBsJAAEBCVNaMjAxNjc0MwAADAUFDBACHBsJAAEBKue7tOWHhueUteWtkOenkeaKgO+8iOa3seWcs++8ieaciemZkOWFrOWPuAAADAUHDBACHBsJAAEBT+a3seWcs+W4guWNl+WxseWMuuahg+a6kOihl+mBk+eVmeS7meWkp+mBkzEyMTPlj7fkvJflhqDnuqLoirHlsq3ljZc15Yy6M+agizLmpbwAAAwFCQwQAhwbCQABAhgAAAAMBQsMEAIcGwkAAQIEAAAADAUHDBACAAAOBgwFAQwQAhwbCQABAQUxMzA2OAAADAUDDBACHBsJAAEBCVNaMjAxNjQyMQAADAUFDBACHBsJAAEBEuW+ruaXheaciemZkOWFrOWPuAAADAUHDBACHBsJAAEBROa3seWcs+W4guemj+eUsOWMuuemj+WNjuS4gOi3r+WFreWPt+WFjeeojuWVhuWKoeWkp+WOpuijmealvOilvzAz5bGCAAAMBQkMEAIcGwkAAQIOAAAADAULDBACHBsJAAECBAAAAAwFCAwQAgAADgYMBQEMEAIcGwkAAQEFMTMwNjcAAAwFAwwQAhwbCQABAQlTWjIwMTYyODYAAAwFBQwQAhwbCQABASrkuIfnu7Tnjq/nkIPnp5HmioDvvIjmt7HlnLPvvInmnInpmZDlhazlj7gAAAwFBwwQAhwbCQABAUPmt7HlnLPluILljZflsbHljLrljJfnjq/ot6/nrKzkupTlt6XkuJrljLrpo47kupHnp5HmioDlpKfljqYxMDHkuYsyAAAMBQkMEAIcGwkAAQIYAAAADAULDBACHBsJAAECBAAAAAwFCQwQAgAADgYMBQEMEAIcGwkAAQEFMTMwNjYAAAwFAwwQAhwbCQABAQlTWjIwMTYxMTIAAAwFBQwQAhwbCQABARjlpKnoiKrnp5HmioDmnInpmZDlhazlj7gAAAwFBwwQAhwbCQABAUHlub/kuJznnIHmt7HlnLPluILljZflsbHljLrnp5HmioDljZfljYHkuozot6/mlrnlpKflpKfljqYyMDUtLTIwNwAADAUJDBACHBsJAAECDgAAAAwFCwwQAhwbCQABAgQAAAAMBQoMEAIAAA4GDAUBDBACHBsJAAEBBTEzMDY1AAAMBQMMEAIcGwkAAQEJU1oyMDE2NzA3AAAMBQUMEAIcGwkAAQEt6aG65pmv5Zut57K+5a+G6ZO46YCg77yI5rex5Zyz77yJ5pyJ6ZmQ5YWs5Y+4AAAMBQcMEAIcGwkAAQFK5rex5Zyz5biC6b6Z5Y2O5paw5Yy66KeC5r6c6KGX6YGT5Li55rmW56S+5Yy65rOX6buO6Lev5r6c5aGY5bel5Lia5Yy6MTbmoIsAAAwFCQwQAhwbCQABAQzph5HlsZ7mnZDmlpkAAAwFCwwQAhwbCQABAgQAAAAMBQsMEAIAAA4GDAUBDBACHBsJAAEBBTEzMDY0AAAMBQMMEAIcGwkAAQEJU1oyMDE2NzM3AAAMBQUMEAIcGwkAAQEz55Sf54mp5rq
Q55Sf54mp5oqA5pyv77yI5rex5Zyz77yJ6IKh5Lu95pyJ6ZmQ5YWs5Y+4AAAMBQcMEAIcGwkAAQEw5rex5Zyz5biC5Z2q5bGx5paw5Yy65Z2q5bGx6KGX6YGT5Z2q6JG16LevMjAw5Y+3AAAMBQkMEAIcGwkAAQES5Yac5Lia55Sf54mp5oqA5pyvAAAMBQsMEAIcGwkAAQIEAAAADAUMDBACAAAOBgwFAQwQAhwbCQABAQUxMzA2MwAADAUDDBACHBsJAAEBCVNaMjAxNjkyOQAADAUFDBACHBsJAAEBHua3seWcs+e6teaoqui9r+S7tuaciemZkOWFrOWPuAAADAUHDBACHBsJAAEBPOa3seWcs+W4guemj+eUsOWMuue6ouWyreWMl+i3r+WFiOenkeacuueUteWkp+WOpjEzMTEtMTMxM+WupAAADAUJDBACHBsJAAECDgAAAAwFCwwQAhwbCQABAgQAAAAMBQ0MEAIAAA4GDAUBDBACHBsJAAEBBTEzMDYyAAAMBQMMEAIcGwkAAQEJU1oyMDE2NDg3AAAMBQUMEAIcGwkAAQEk5rex5Zyz5Li75Z+O572R57uc56eR5oqA5pyJ6ZmQ5YWs5Y+4AAAMBQcMEAIcGwkAAQFH5rex5Zyz5biC5Y2X5bGx5Yy65oub5ZWG6KGX6YGT5Y2X5rW35aSn6YGT6Iqx5qC35bm0576O5bm05bm/5Zy6MeagizTmpbwAAAwFCQwQAhwbCQABAg4AAAAMBQsMEAIcGwkAAQIEAAAADAUODBACAAAOBgwFAQwQAhwbCQABAQUxMzA2MQAADAUDDBACHBsJAAEBCVNaMjAxNjQyOAAADAUFDBACHBsJAAEBKua3seWcs+a0sumZhemAmuiIquaKlei1hOaOp+iCoeaciemZkOWFrOWPuAAADAUHDBACHBsJAAEBa+a3seWcs+W4guWJjea1t+a3sea4r+WQiOS9nOWMuuWJjea5vuS4gOi3rzHlj7dB5qCLMjAx5a6k77yI5YWl6am75rex5Zyz5biC5YmN5rW35ZWG5Yqh56eY5Lmm5pyJ6ZmQ5YWs5Y+477yJAAAMBQkMEAIcGwkAAQEM6Iiq5aSp5oqA5pyvAAAMBQsMEAIcGwkAAQIEAAAADAUPDBACAAAOBgwFAQwQAhwbCQABAQUxMzA2MAAADAUDDBACHBsJAAEBCVNaMjAxNjY1OQAADAUFDBACHBsJAAEBKua3seWcs+S4reaZuue7j+a1juaKgOacr+WQiOS9nOaciemZkOWFrOWPuAAADAUHDBACHBsJAAEBPea3seWcs+W4guemj+eUsOWMuua3seWNl+S4rei3rzEwMDLlj7fmlrDpl7vlpKfljqYx5Y+35qW8MzHlsYIAAAwFCQwQAhwbCQABAR7kv6Hmga/mioDmnK/kuJrliqHmtYHnqIvlpJbljIUAAAwFCwwQAhwbCQABAgQAAAAMBRAMEAIAAA4GDAUBDBACHBsJAAEBBTEzMDU5AAAMBQMMEAIcGwkAAQEJU1oyMDE2ODUyAAAMBQUMEAIcGwkAAQEk5rex5Zyz5Lit55uI5Yib5bGV56eR5oqA5pyJ6ZmQ5YWs5Y+4AAAMBQcMEAIcGwkAAQFE5rex5Zyz5biC5Y2X5bGx5Yy65qGD5rqQ6KGX6YGT57qi6Iqx5bKt5bel5Lia5Zut56ysNeWMukIx5qCLOOalvDHlj7cAAAwFCQwQAhwbCQABARLmmbrog73kuqTpgJrmioDmnK8AAAwFCwwQAhwbCQABAgQAAAAMBREMEAIAAA4GDAUBDBACHBsJAAEBBTEzMDU4AAAMBQMMEAIcGwkAAQEJU1oyMDE2MTYzAAAMBQUMEAIcGwkAAQEq5rex5Zyz5Lit6K6k6YCa5rWL5qOA6aqM5oqA5pyv5pyJ6ZmQ5YWs5Y+4AAAMBQcMEAIcGwkAAQFM5rex5Zyz5biC6b6Z5Y2O5paw5Yy66KeC5r6c6KGX6YGT6auY5paw5oqA5pyv5Lqn5Lia5Zut5ZiJ5rOJ5aSn5Y6mMuagizEtMuWxggAADAUJDBACHBsJAAEBDOWFseaAp+aKgOacrwAADAULDBACHBsJAAECBAAAAAwFEgwQAgAADgYMBQEMEAIcGwkAAQEFMTMwNTcAAAwFAwwQAhwbCQABAQpTWjIwMTYxMDEzAAAMBQUMEAIcGwkAAQEn5rex5Zyz5Lit56eR5pm65pif6YCa56eR5oqA5pyJ6ZmQ5YWs5Y+4AAAMBQcMEAIcGwkAAQFb5rex5Zyz5biC5a6d5a6J5Yy65paw5a6J6KGX6YGT5Yib5Lia5LqM6LevMjjljLrlrp3lronpm4blm6LnrKzkuIDlt6XkuJrln4445qCLMjA2LTIxMOWOguaIvwAADAUJDBACHBsJAAECIgAAAAwFCwwQAhwbCQABAgQAAAAMBRMMEAIAAA4GDAUBDBACHBsJAAEBBTEzMDU2AAAMBQMMEAIcGwkAAQEKU1oyMDE2MTEwNgAADAUFDBACHBsJAAEBJOa3seWcs+S4reenkemjnua1i+enkeaKgOaciemZkOWFrOWPuAAADAUHDBACHBsJAAEBa+a3seWcs+W4guWFieaYjuaWsOWMuuWFieaYjuihl+mBk+mrmOaWsOWMuumrmOaWsOi3rzEx5Y+356CU56Wl56eR5oqA5bel5Lia5Zut5py65qKw5Y6C5oi/5LqM5qW86KW/5L6n5Y2V5YWDAAAMBQkMEAIcGwkAAQEP5b6u55S15a2Q5oqA5pyvAAAMBQsMEAIcGwkAAQIEAAAADAUUDBACAAAOBgwFAQwQAhwbCQABAQUxMzA1NQAADAUDDBACHBsJAAEBCVNaMjAxNjA0MAAADAUFDBACHBsJAAEBJOa3seWcs+S4reaBkuajgOa1i+aKgOacr+aciemZkOWFrOWPuAAADAUHDBACHBsJAAEBYea3seWcs+W4guWuneWuieWMuuadvuWyl+ihl+mBk+WhmOS4i+a2jOekvuWMuuS6jOadkeiHtOWSjOi3r+W5v+ebm+Wkp+WOpuS6jOalvDIwMSjlip7lhazlnLrmiYDvvIkAAAwFCQwQAhwbCQABAlgAAAAMBQsMEAIcGwkAAQIEAAAADAUJDBACDA8CAQtSZWNvcmRjb3VudAQmMwAAARBDdXJyZW50UGFnZUluZGV4BQIPAQEMU2hvd0lucHV0Qm94AQZBbHdheXMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACSNyrC6ulzXwPOghxQO8EwzDUu8Qkh5aHGXqtfWwzzhQ==",
"ddlEnterpriseType": "",
"txtEnterpriseName": ""
}
response = requests.post(url, data=from_data)
print(response.status_code)
if response.status_code == 200:
soup = BeautifulSoup(response.text, "html.parser")
target = soup.find(id="data_list_container").findAll("tr")
rexcel = open_workbook("TokyoHot.xls")
row = rexcel.sheets()[0].nrows
excel = copy(rexcel)
table = excel.get_sheet(0)
for tr in target[1:]: # ignore th
            tds = tr.findAll("td") # get all the td cells
td = [x for x in tds]
index = td[0].text.strip()
serialNo = td[1].text.strip()
companyName = td[2].text.strip()
location = td[3].text.strip()
domain = td[4].text.strip()
category = td[5].text.strip()
table.write(row, 0, index, set_style('Arial', 220))
table.write(row, 1, serialNo, set_style('Arial', 220))
table.write(row, 2, companyName, set_style('Arial', 220))
table.write(row, 3, location, set_style('Arial', 220))
table.write(row, 4, domain, set_style('Arial', 220))
table.write(row, 5, category, set_style('Arial', 220))
row = row + 1
excel.save('TokyoHot.xls')
else:
print("500 Error")
getTableByPage(curPage=curPage,nextPage=nextPage)
def getFirstPage():
response = requests.get("http://www.szsti.gov.cn/services/hightech/default.aspx")
if response.status_code == 200:
soup = BeautifulSoup(response.text, "html.parser")
target = soup.find(id="data_list_container").findAll("tr")
rexcel = open_workbook("TokyoHot.xls")
row = rexcel.sheets()[0].nrows
excel = copy(rexcel)
table = excel.get_sheet(0)
for tr in target[1:]: # ignore th
            tds = tr.findAll("td") # get all the td cells
td = [x for x in tds]
index = td[0].text.strip()
serialNo = td[1].text.strip()
companyName = td[2].text.strip()
location = td[3].text.strip()
domain = td[4].text.strip()
category = td[5].text.strip()
table.write(row, 0, index, set_style('Arial', 220))
table.write(row, 1, serialNo, set_style('Arial', 220))
table.write(row, 2, companyName, set_style('Arial', 220))
table.write(row, 3, location, set_style('Arial', 220))
table.write(row, 4, domain, set_style('Arial', 220))
table.write(row, 5, category, set_style('Arial', 220))
row = row + 1
excel.save('TokyoHot.xls')
else:
print("firstPage 500 Error")
getFirstPage()
def pa(curPage=None,nextPage=None):
while curPage != 1:
getTableByPage(curPage=curPage, nextPage=nextPage)
curPage -= 1
nextPage -= 1
getFirstPage()
pa(654,655)
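The scraper above appends rows to an existing TokyoHot.xls workbook; a minimal sketch for creating that seed workbook with xlwt (the header labels are assumptions, chosen to match the six columns written above):
import xlwt
# One-time setup: create the workbook the appender expects, with a single header row.
seed = xlwt.Workbook(encoding='utf-8')
sheet = seed.add_sheet('Sheet1')
for col, title in enumerate(['index', 'serialNo', 'companyName', 'location', 'domain', 'category']):
    sheet.write(0, col, title)
seed.save('TokyoHot.xls')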
| 146.892157 | 10,994 | 0.890276 |
70fa22ba2c0da02145058f3796086ae60b56c582 | 706 | py | Python | detect.py | jskrzypek/flask-video-streaming | 3c1fdff06f794f80e5d259f15bb6bbf4992e12c2 | ["MIT"] | null | null | null | detect.py | jskrzypek/flask-video-streaming | 3c1fdff06f794f80e5d259f15bb6bbf4992e12c2 | ["MIT"] | null | null | null | detect.py | jskrzypek/flask-video-streaming | 3c1fdff06f794f80e5d259f15bb6bbf4992e12c2 | ["MIT"] | null | null | null |
#import numpy as np
import cv2
import os
print("Path at terminal when executing this file")
print(os.getcwd())
# Load the banana cascade classifier from classifier/banana_classifier.xml
car_cascade = cv2.CascadeClassifier(r'classifier/banana_classifier.xml')
image = cv2.imread('images/image1.jpg')
# Crop so that only the region of interest remains, eliminating the distraction.
#image = image[120:,:-20]
# Use the cascade classifier to detect bananas; the parameters
# may have to be tuned to reduce false positives.
cars = car_cascade.detectMultiScale(image, 1.1, 2)
for (x,y,w,h) in cars:
cv2.rectangle(image,(x,y),(x+w,y+h),(255,0,0),2)
print('Processing', 1, ' : bananas detected : ', len(cars))
cv2.imwrite('images/'+ 'processed.jpg', image)
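A hedged extension of the same idea that batches detection over a folder of images; the images/*.jpg glob and the output naming are assumptions, and cv2 plus car_cascade are reused from above:
import glob
# Run the banana cascade over every .jpg under images/ and save annotated copies.
for i, path in enumerate(glob.glob('images/*.jpg')):
    img = cv2.imread(path)
    if img is None:  # skip unreadable files
        continue
    found = car_cascade.detectMultiScale(img, 1.1, 2)
    for (x, y, w, h) in found:
        cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)
    print('Processing', path, ': bananas detected : ', len(found))
    cv2.imwrite('images/processed_%d.jpg' % i, img)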
| 27.153846 | 72 | 0.725212 |
eeed068009dbadfe4245fda179368d09693259cf | 992 | py | Python | testsuite/driver/src/api/mplfe.py | MapleSystem/OpenArkCompiler | fc250857642ca38ac8b83ae7486513fadf3ab742 | ["MulanPSL-1.0"] | null | null | null | testsuite/driver/src/api/mplfe.py | MapleSystem/OpenArkCompiler | fc250857642ca38ac8b83ae7486513fadf3ab742 | ["MulanPSL-1.0"] | null | null | null | testsuite/driver/src/api/mplfe.py | MapleSystem/OpenArkCompiler | fc250857642ca38ac8b83ae7486513fadf3ab742 | ["MulanPSL-1.0"] | null | null | null |
#
# Copyright (c) [2021] Huawei Technologies Co.,Ltd.All rights reserved.
#
# OpenArkCompiler is licensed under Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
#
# http://license.coscl.org.cn/MulanPSL2
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR
# FIT FOR A PARTICULAR PURPOSE.
# See the Mulan PSL v2 for more details.
#
from api.shell_operator import ShellOperator
class Mplfe(ShellOperator):
def __init__(self, mplfe, infile, outfile, return_value_list=None, redirection=None):
super().__init__(return_value_list, redirection)
self.mplfe = mplfe
self.infile = infile
self.outfile = outfile
def get_command(self, variables):
self.command = self.mplfe + " " + self.infile + " -o " + self.outfile
return super().get_final_command(variables)
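A hedged usage sketch for this wrapper; the binary location and file names are hypothetical, and the exact command returned depends on ShellOperator.get_final_command, which is not shown here:
# Compose an mplfe invocation the way the test driver would.
op = Mplfe(mplfe="${OUT_ROOT}/bin/mplfe", infile="HelloWorld.dex", outfile="HelloWorld.mpl")
print(op.get_command(variables={}))  # roughly "<mplfe> HelloWorld.dex -o HelloWorld.mpl" plus any base-class handling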
| 34.206897 | 89 | 0.717742 |
99b0deef656d5a003ae7337d63824568e0762665 | 5,970 | py | Python | openstates/scrape/schemas/bill.py | csnardi/openstates-core | f08d6365cfefcdc3088a722e36a46b6b0b2e1d9c | ["MIT"] | null | null | null | openstates/scrape/schemas/bill.py | csnardi/openstates-core | f08d6365cfefcdc3088a722e36a46b6b0b2e1d9c | ["MIT"] | null | null | null | openstates/scrape/schemas/bill.py | csnardi/openstates-core | f08d6365cfefcdc3088a722e36a46b6b0b2e1d9c | ["MIT"] | null | null | null |
"""
Schema for bill objects.
"""
import copy
from .common import sources, extras, fuzzy_date_blank, fuzzy_datetime
from ...data import common
versions_or_documents = {
"items": {
"properties": {
"note": {"type": "string", "minLength": 1},
"date": fuzzy_date_blank,
"classification": {"type": "string"},
"links": {
"items": {
"properties": {
"media_type": {"type": "string"},
"url": {"type": "string", "format": "uri"},
},
"type": "object",
},
"type": "array",
},
},
"type": "object",
},
"type": "array",
}
versions = copy.deepcopy(versions_or_documents)
versions["items"]["properties"]["classification"][
"enum"
] = common.BILL_VERSION_CLASSIFICATIONS
documents = copy.deepcopy(versions_or_documents)
documents["items"]["properties"]["classification"][
"enum"
] = common.BILL_DOCUMENT_CLASSIFICATIONS
schema = {
"type": "object",
"properties": {
"legislative_session": {"type": "string", "minLength": 1},
"identifier": {"type": "string", "minLength": 1},
"title": {"type": "string", "minLength": 1},
"from_organization": {"type": ["string", "null"]},
"classification": {
"items": {"type": "string", "enum": common.BILL_CLASSIFICATIONS},
"type": "array",
},
"subject": {"items": {"type": "string", "minLength": 1}, "type": "array"},
"abstracts": {
"items": {
"properties": {
"abstract": {"type": "string", "minLength": 1},
"note": {"type": "string"},
"date": {"type": "string"},
},
"type": "object",
},
"type": "array",
},
"other_titles": {
"items": {
"properties": {
"title": {"type": "string", "minLength": 1},
"note": {"type": "string"},
},
"type": "object",
},
"type": "array",
},
"other_identifiers": {
"items": {
"properties": {
"identifier": {"type": "string", "minLength": 1},
"note": {"type": "string"},
"scheme": {"type": "string"},
},
"type": "object",
},
"type": "array",
},
"actions": {
"items": {
"properties": {
"organization": {"type": ["string", "null"]},
"date": fuzzy_datetime,
"description": {"type": "string", "minLength": 1},
"classification": {
"items": {
"type": "string",
"enum": common.BILL_ACTION_CLASSIFICATIONS,
},
"type": "array",
},
"related_entities": {
"items": {
"properties": {
"name": {"type": "string", "minLength": 1},
"entity_type": {
"enum": ["organization", "person", ""],
"type": "string",
},
"person_id": {"type": ["string", "null"]},
"organization_id": {"type": ["string", "null"]},
},
"type": "object",
},
"type": "array",
},
},
"type": "object",
},
"type": "array",
},
"sponsorships": {
"items": {
"properties": {
"primary": {"type": "boolean"},
"classification": {"type": "string", "minLength": 1},
"name": {"type": "string", "minLength": 1},
"entity_type": {
"enum": ["organization", "person", ""],
"type": "string",
},
"person_id": {"type": ["string", "null"]},
"organization_id": {"type": ["string", "null"]},
},
"type": "object",
},
"type": "array",
},
"related_bills": {
"items": {
"properties": {
"identifier": {"type": "string", "minLength": 1},
"legislative_session": {"type": "string", "minLength": 1},
"relation_type": {
"enum": common.BILL_RELATION_TYPES,
"type": "string",
},
},
"type": "object",
},
"type": "array",
},
"versions": versions,
"documents": documents,
"citations": {
"items": {
"properties": {
"publication": {"type": "string", "minLength": 1},
"citation": {"type": "string", "minLength": 1},
"citation_type": {
"enum": common.CITATION_TYPES,
"type": "string",
},
"effective": {"type": [fuzzy_date_blank, "null"]},
"expires": {"type": [fuzzy_date_blank, "null"]},
"url": {"type": ["string", "null"]},
},
"type": "object",
},
"type": "array",
},
"sources": sources,
"extras": extras,
},
}
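A hedged sketch of exercising this schema with the third-party jsonschema package, assuming the module is importable under the path shown above; the bill payload is illustrative, not taken from the source:
import jsonschema
from openstates.scrape.schemas.bill import schema
# Validate a minimal bill payload; jsonschema raises ValidationError on the first violation.
bill = {
    "legislative_session": "2021",
    "identifier": "HB 1",
    "title": "An example bill",
    "classification": ["bill"],
}
jsonschema.validate(bill, schema)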
| 35.325444 | 82 | 0.358124 |
bd91044ae9007d44bf0785854fa2575f1ab41592 | 8,862 | py | Python | tests/train/test_classif_mnist.py | crim-ca/thelper | 1415144cf70e4492c2ef00f834e2b9a988064a76 | ["Apache-2.0"] | null | null | null | tests/train/test_classif_mnist.py | crim-ca/thelper | 1415144cf70e4492c2ef00f834e2b9a988064a76 | ["Apache-2.0"] | null | null | null | tests/train/test_classif_mnist.py | crim-ca/thelper | 1415144cf70e4492c2ef00f834e2b9a988064a76 | ["Apache-2.0"] | 1 | 2020-02-17T14:14:46.000Z | 2020-02-17T14:14:46.000Z |
import copy
import os
import shutil
from typing import Any, Optional # noqa: F401
import numpy as np
import pytest
import torch
import thelper
test_save_path = ".pytest_cache"
test_classif_mnist_name = "test-classif-mnist"
test_classif_mnist_path = os.path.join(test_save_path, test_classif_mnist_name)
test_classif_mnist_ft_path = os.path.join(test_save_path, test_classif_mnist_name + "-finetune")
@pytest.fixture
def config(request):
def fin():
shutil.rmtree(test_classif_mnist_path, ignore_errors=True)
shutil.rmtree(test_classif_mnist_ft_path, ignore_errors=True)
fin()
request.addfinalizer(fin)
return {
"name": test_classif_mnist_name,
"bypass_queries": True,
"datasets": {
"mnist": {
"type": "torchvision.datasets.MNIST",
"params": {
"root": os.path.join(test_classif_mnist_path, "mnist"),
"train": False, # use test set, its smaller (quicker test)
"download": True
},
"task": {
"type": "thelper.tasks.Classification",
"params": {
"class_names": [
"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"
],
"input_key": "0",
"label_key": "1"
}
}
}
},
"loaders": {
"shuffle": True,
"batch_size": 32,
"workers": 0,
"skip_class_balancing": True,
"base_transforms": [
{
"operation": "thelper.transforms.NormalizeMinMax",
"params": {
"min": [127],
"max": [255]
}
},
{
"operation": "thelper.transforms.Unsqueeze",
"params": {
"axis": 0
}
}
],
"train_scale": 0.1,
"train_split": {
"mnist": 0.8
},
"valid_scale": 0.1,
"valid_split": {
"mnist": 0.1
},
"test_scale": 0.1,
"test_split": {
"mnist": 0.1
}
},
"model": {
"type": "thelper.nn.lenet.LeNet"
},
"trainer": {
"epochs": 2,
"monitor": "accuracy",
"optimization": {
"loss": {
"type": "torch.nn.CrossEntropyLoss"
},
"optimizer": {
"type": "torch.optim.Adam",
"params": {
"lr": 0.001
}
}
},
"metrics": {
"accuracy": {
"type": "thelper.optim.Accuracy",
"params": {
"top_k": 1
}
}
}
}
}
def test_reload(config):
train_outputs = thelper.cli.create_session(config, test_save_path)
assert len(train_outputs) == 2
assert train_outputs[0]["train/metrics"]["accuracy"] < train_outputs[1]["train/metrics"]["accuracy"]
ckptdata = thelper.utils.load_checkpoint(test_classif_mnist_path, always_load_latest=True)
override_config = copy.deepcopy(config)
override_config["trainer"]["epochs"] = 3
resume_outputs = thelper.cli.resume_session(ckptdata, save_dir=test_save_path, config=override_config)
assert len(resume_outputs) == 3
assert train_outputs[1]["train/metrics"]["accuracy"] < resume_outputs[2]["train/metrics"]["accuracy"]
ckptdata = thelper.utils.load_checkpoint(test_classif_mnist_path)
eval_outputs = thelper.cli.resume_session(ckptdata, save_dir=test_save_path, eval_only=True)
assert any(["test/metrics" in v for v in eval_outputs.values()])
override_config["trainer"]["epochs"] = 1
override_config["model"] = {"ckptdata": test_classif_mnist_path}
override_config["name"] += "-finetune"
finetune_outputs = thelper.cli.create_session(override_config, test_save_path)
assert len(finetune_outputs) == 1
assert finetune_outputs[0]["train/metrics"]["accuracy"] > train_outputs[1]["train/metrics"]["accuracy"]
def compare_dictionaries(dictA, dictB, dictA_name="A", dictB_name="B", path=""):
err, key_err, value_err = "", "", ""
old_path = path
for k in dictA.keys():
path = old_path + "[%s]" % k
if k not in dictB:
            key_err += "key %s%s not in %s\n" % (dictA_name, path, dictB_name)
else:
if isinstance(dictA[k], dict) and isinstance(dictB[k], dict):
err += compare_dictionaries(dictA[k], dictB[k], dictA_name, dictB_name, path)
else:
if dictA[k] != dictB[k]:
value_err += "value of %s%s (%s) not same as %s%s (%s)\n"\
% (dictA_name, path, dictA[k], dictB_name, path, dictB[k])
for k in dictB.keys():
path = old_path + "[%s]" % k
if k not in dictA:
key_err += "key %s%s not in %s\n" % (dictB_name, path, dictA_name)
return key_err + value_err + err
def test_outputs(config):
override_config = copy.deepcopy(config)
override_config["trainer"]["use_tbx"] = True
train_outputs = thelper.cli.create_session(override_config, test_save_path)
assert len(train_outputs) == 2
assert train_outputs[0]["train/metrics"]["accuracy"] < train_outputs[1]["train/metrics"]["accuracy"]
output_path = os.path.join(test_classif_mnist_path, "output", test_classif_mnist_name)
assert os.path.isdir(output_path)
out_dirs = next(os.walk(output_path))[1]
assert len(out_dirs) == 2
assert any([out_dir.startswith("train-") for out_dir in out_dirs])
assert any([out_dir.startswith("valid-") for out_dir in out_dirs])
for out_dir in out_dirs:
ltype = "train" if out_dir.startswith("train-") else "valid"
epoch_out_path = os.path.join(output_path, out_dir)
assert os.path.isdir(epoch_out_path)
epoch_out_files = next(os.walk(epoch_out_path))[2]
assert len(epoch_out_files) == 4
assert "accuracy-0000.txt" in epoch_out_files
assert "accuracy-0001.txt" in epoch_out_files
assert "config.json" in epoch_out_files
assert any([p.startswith("events.out.tfevents.") for p in epoch_out_files])
for filename in epoch_out_files:
if filename.startswith("accuracy-"):
epoch = int(filename.split("-")[1].split(".")[0])
with open(os.path.join(epoch_out_path, filename), "r") as fd:
assert np.isclose(float(fd.readline()), train_outputs[epoch][ltype + "/metrics"]["accuracy"])
elif filename == "config.json":
backup_config_path = os.path.join(epoch_out_path, filename)
backup_config = thelper.utils.load_config(backup_config_path, add_name_if_missing=False)
assert compare_dictionaries(backup_config, override_config) == ""
def callback(task, # type: thelper.tasks.utils.Task
input, # type: thelper.typedefs.InputType
pred, # type: thelper.typedefs.AnyPredictionType
target, # type: thelper.typedefs.AnyTargetType
sample, # type: thelper.typedefs.SampleType
loss, # type: Optional[float]
iter_idx, # type: int
max_iters, # type: int
epoch_idx, # type: int
max_epochs, # type: int
**kwargs, # type: Any
): # type: (...) -> None
assert isinstance(task, thelper.tasks.Classification)
assert isinstance(input, torch.Tensor)
assert isinstance(pred, torch.Tensor)
assert isinstance(target, torch.Tensor)
assert isinstance(sample, dict)
assert isinstance(iter_idx, int)
assert isinstance(max_iters, int) and iter_idx < max_iters
assert isinstance(epoch_idx, int)
assert isinstance(max_epochs, int) and epoch_idx < max_epochs
assert "hello" in kwargs
kwargs["hello"][0] = "bye"
def test_callbacks(config, mocker):
override_config = copy.deepcopy(config)
override_config["trainer"]["callback"] = callback
override_config["trainer"]["callback_kwargs"] = {"hello": ["hi"]}
override_config["trainer"]["display"] = True
fake_draw = mocker.patch("thelper.draw.draw")
assert thelper.cli.create_session(override_config, test_save_path)
assert fake_draw.call_count > 0
assert override_config["trainer"]["callback_kwargs"]["hello"][0] == "bye"
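For reference, a minimal sketch for running just this module locally; it assumes pytest and pytest-mock (for the mocker fixture) are installed and that the working directory is the repository root:
import pytest
# Equivalent to running `pytest tests/train/test_classif_mnist.py` from a shell;
# the MNIST test split is downloaded into .pytest_cache on the first run.
pytest.main(["tests/train/test_classif_mnist.py"])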
| 40.099548 | 113 | 0.558903 |
272d2f742f3047d8e70637c087cafa8274ad7431 | 350 | py | Python | thonny/__main__.py | binbinyouli1314/thonny | 895674b800f4fd94189bed6ce0ac23067c0eecfb | ["MIT"] | 1 | 2019-02-12T06:10:53.000Z | 2019-02-12T06:10:53.000Z | thonny/__main__.py | binbinyouli1314/thonny | 895674b800f4fd94189bed6ce0ac23067c0eecfb | ["MIT"] | null | null | null | thonny/__main__.py | binbinyouli1314/thonny | 895674b800f4fd94189bed6ce0ac23067c0eecfb | ["MIT"] | null | null | null |
import sys
import runpy
if sys.executable.endswith("thonny.exe"):
# otherwise some library may try to run its subprocess with thonny.exe
sys.executable = sys.executable[:-len("thonny.exe")] + "pythonw.exe"
from thonny import launch
try:
runpy.run_module("thonny.customize", run_name="__main__")
except ImportError:
pass
launch()
| 20.588235 | 74 | 0.728571 |
064f03fee3ed7850909dc928fea06a9bdba71dfe | 2,376 | py | Python | process_excel.py | Cynthia7979/random-word-list-generator | c976bf8775d38af42cc8ee35060ecd2035ed7083 | ["MIT"] | null | null | null | process_excel.py | Cynthia7979/random-word-list-generator | c976bf8775d38af42cc8ee35060ecd2035ed7083 | ["MIT"] | null | null | null | process_excel.py | Cynthia7979/random-word-list-generator | c976bf8775d38af42cc8ee35060ecd2035ed7083 | ["MIT"] | null | null | null |
# Processes a formatted Excel workbook into CSV. See sources/托福红宝词汇45天突破版.xls
import xlrd
import csv
import argparse
import sys, os
def main():
words_column = 2
meanings_column = 3
word_list_end_identifier = lambda row, sheet: sheet.cell_value(row, words_column) == ''
workbook_path = './托福红宝词汇45天突破版.xls'
output_path = './sources/'
sheet_name = 'Sheet1'
if len(sys.argv) > 1:
# Parse arguments
parser = argparse.ArgumentParser()
parser.add_argument('workbook', help='the .xls workbook path')
parser.add_argument('sheet_name', help='the name of the sheet holding word/meaning data')
parser.add_argument('--wordscol', help='number of the column holding words, starting from 0')
parser.add_argument('--meaningscol', help='number of the column holding meanings, in integer')
parser.add_argument('--end',
help='lambda function taking arguments "row" and "sheet" that returns True ' +
'when current row marks the end of useful content. See README.md for details.')
parser.add_argument('--output', help='output path')
namespace = parser.parse_args()
workbook_path, sheet_name = namespace.workbook, namespace.sheet_name
if namespace.end:
assert namespace.end.startswith('lambda '), 'End identifier must be a lambda function.'
word_list_end_identifier = eval(namespace.end)
if namespace.meaningscol:
meanings_column = int(namespace.meaningscol)
        if namespace.wordscol:
            words_column = int(namespace.wordscol)
        if namespace.output:
            output_path = namespace.output
    workbook_name = os.path.splitext(os.path.basename(workbook_path))[0]  # file name without directory or extension
sheet = xlrd.open_workbook(workbook_path).sheet_by_name(sheet_name)
row = 0
with open(os.path.join(output_path, '%s.csv' % workbook_name), mode='w', newline='', encoding='utf-8') as csvfile:
writer = csv.DictWriter(csvfile, fieldnames=['word', 'meaning'])
writer.writeheader()
while not word_list_end_identifier(row, sheet):
word = sheet.cell_value(row, words_column)
meaning = sheet.cell_value(row, meanings_column)
writer.writerow({'word': word, 'meaning': meaning})
row += 1
if __name__ == '__main__':
main()
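A hedged command-line example matching the argparse definitions above; the workbook name, sheet name, and column numbers are placeholders:
# Example invocation (placeholders, not taken from the source):
#   python process_excel.py my_words.xls Sheet1 --wordscol 2 --meaningscol 3 \
#       --end "lambda row, sheet: sheet.cell_value(row, 2) == ''" --output ./sources/
# With no command-line arguments, the defaults defined at the top of main() are used instead.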
| 43.2 | 119 | 0.641414 |
4f09c0f904fe6cc31a39cf823dd18f18892a7070 | 192,345 | py | Python | mesonbuild/interpreter.py | Hi-Angel/meson | b4347ca4a98b148ba7f7908c3d96f393294df122 | ["Apache-2.0"] | null | null | null | mesonbuild/interpreter.py | Hi-Angel/meson | b4347ca4a98b148ba7f7908c3d96f393294df122 | ["Apache-2.0"] | null | null | null | mesonbuild/interpreter.py | Hi-Angel/meson | b4347ca4a98b148ba7f7908c3d96f393294df122 | ["Apache-2.0"] | null | null | null |
# Copyright 2012-2018 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import mparser
from . import environment
from . import coredata
from . import dependencies
from . import mlog
from . import build
from . import optinterpreter
from . import compilers
from .wrap import wrap, WrapMode
from . import mesonlib
from .mesonlib import FileMode, Popen_safe, listify, extract_as_list, has_path_sep
from .dependencies import ExternalProgram
from .dependencies import InternalDependency, Dependency, NotFoundDependency, DependencyException
from .interpreterbase import InterpreterBase
from .interpreterbase import check_stringlist, flatten, noPosargs, noKwargs, stringArgs, permittedKwargs, noArgsFlattening
from .interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest
from .interpreterbase import InterpreterObject, MutableInterpreterObject, Disabler, disablerIfNotFound
from .interpreterbase import FeatureNew, FeatureDeprecated, FeatureNewKwargs
from .interpreterbase import ObjectHolder
from .modules import ModuleReturnValue
import os, shutil, uuid
import re, shlex
import subprocess
from collections import namedtuple
from pathlib import PurePath
import traceback
import functools
import importlib
permitted_method_kwargs = {
'partial_dependency': {'compile_args', 'link_args', 'links', 'includes',
'sources'},
}
def stringifyUserArguments(args):
if isinstance(args, list):
return '[%s]' % ', '.join([stringifyUserArguments(x) for x in args])
elif isinstance(args, dict):
return '{%s}' % ', '.join(['%s : %s' % (stringifyUserArguments(k), stringifyUserArguments(v)) for k, v in args.items()])
elif isinstance(args, int):
return str(args)
elif isinstance(args, str):
return "'%s'" % args
raise InvalidArguments('Function accepts only strings, integers, lists and lists thereof.')
class FeatureOptionHolder(InterpreterObject, ObjectHolder):
def __init__(self, env, option):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, option)
if option.is_auto():
self.held_object = env.coredata.builtins['auto_features']
self.name = option.name
self.methods.update({'enabled': self.enabled_method,
'disabled': self.disabled_method,
'auto': self.auto_method,
})
@noPosargs
@permittedKwargs({})
def enabled_method(self, args, kwargs):
return self.held_object.is_enabled()
@noPosargs
@permittedKwargs({})
def disabled_method(self, args, kwargs):
return self.held_object.is_disabled()
@noPosargs
@permittedKwargs({})
def auto_method(self, args, kwargs):
return self.held_object.is_auto()
def extract_required_kwarg(kwargs, subproject, feature_check=None):
val = kwargs.get('required', True)
disabled = False
required = False
feature = None
if isinstance(val, FeatureOptionHolder):
if not feature_check:
feature_check = FeatureNew('User option "feature"', '0.47.0')
feature_check.use(subproject)
option = val.held_object
feature = val.name
if option.is_disabled():
disabled = True
elif option.is_enabled():
required = True
    elif isinstance(val, bool):
required = val
else:
raise InterpreterException('required keyword argument must be boolean or a feature option')
# Keep boolean value in kwargs to simplify other places where this kwarg is
# checked.
kwargs['required'] = required
return disabled, required, feature
class TryRunResultHolder(InterpreterObject):
def __init__(self, res):
super().__init__()
self.res = res
self.methods.update({'returncode': self.returncode_method,
'compiled': self.compiled_method,
'stdout': self.stdout_method,
'stderr': self.stderr_method,
})
@noPosargs
@permittedKwargs({})
def returncode_method(self, args, kwargs):
return self.res.returncode
@noPosargs
@permittedKwargs({})
def compiled_method(self, args, kwargs):
return self.res.compiled
@noPosargs
@permittedKwargs({})
def stdout_method(self, args, kwargs):
return self.res.stdout
@noPosargs
@permittedKwargs({})
def stderr_method(self, args, kwargs):
return self.res.stderr
class RunProcess(InterpreterObject):
def __init__(self, cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir=False, check=False, capture=True):
super().__init__()
if not isinstance(cmd, ExternalProgram):
raise AssertionError('BUG: RunProcess must be passed an ExternalProgram')
self.capture = capture
pc, self.stdout, self.stderr = self.run_command(cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check)
self.returncode = pc.returncode
self.methods.update({'returncode': self.returncode_method,
'stdout': self.stdout_method,
'stderr': self.stderr_method,
})
def run_command(self, cmd, args, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check=False):
command_array = cmd.get_command() + args
env = {'MESON_SOURCE_ROOT': source_dir,
'MESON_BUILD_ROOT': build_dir,
'MESON_SUBDIR': subdir,
'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in mesonintrospect]),
}
if in_builddir:
cwd = os.path.join(build_dir, subdir)
else:
cwd = os.path.join(source_dir, subdir)
child_env = os.environ.copy()
child_env.update(env)
stdout = subprocess.PIPE if self.capture else subprocess.DEVNULL
mlog.debug('Running command:', ' '.join(command_array))
try:
p, o, e = Popen_safe(command_array, stdout=stdout, env=child_env, cwd=cwd)
if self.capture:
mlog.debug('--- stdout ---')
mlog.debug(o)
else:
o = ''
mlog.debug('--- stdout disabled ---')
mlog.debug('--- stderr ---')
mlog.debug(e)
mlog.debug('')
if check and p.returncode != 0:
raise InterpreterException('Command "{}" failed with status {}.'.format(' '.join(command_array), p.returncode))
return p, o, e
except FileNotFoundError:
raise InterpreterException('Could not execute command "%s".' % ' '.join(command_array))
@noPosargs
@permittedKwargs({})
def returncode_method(self, args, kwargs):
return self.returncode
@noPosargs
@permittedKwargs({})
def stdout_method(self, args, kwargs):
return self.stdout
@noPosargs
@permittedKwargs({})
def stderr_method(self, args, kwargs):
return self.stderr
class ConfigureFileHolder(InterpreterObject, ObjectHolder):
def __init__(self, subdir, sourcename, targetname, configuration_data):
InterpreterObject.__init__(self)
obj = build.ConfigureFile(subdir, sourcename, targetname, configuration_data)
ObjectHolder.__init__(self, obj)
class EnvironmentVariablesHolder(MutableInterpreterObject, ObjectHolder):
def __init__(self):
MutableInterpreterObject.__init__(self)
ObjectHolder.__init__(self, build.EnvironmentVariables())
self.methods.update({'set': self.set_method,
'append': self.append_method,
'prepend': self.prepend_method,
})
def __repr__(self):
repr_str = "<{0}: {1}>"
return repr_str.format(self.__class__.__name__, self.held_object.envvars)
def add_var(self, method, args, kwargs):
if not isinstance(kwargs.get("separator", ""), str):
raise InterpreterException("EnvironmentVariablesHolder methods 'separator'"
" argument needs to be a string.")
if len(args) < 2:
raise InterpreterException("EnvironmentVariablesHolder methods require at least"
"2 arguments, first is the name of the variable and"
" following one are values")
self.held_object.envvars.append((method, args[0], args[1:], kwargs))
@stringArgs
@permittedKwargs({'separator'})
def set_method(self, args, kwargs):
self.add_var(self.held_object.set, args, kwargs)
@stringArgs
@permittedKwargs({'separator'})
def append_method(self, args, kwargs):
self.add_var(self.held_object.append, args, kwargs)
@stringArgs
@permittedKwargs({'separator'})
def prepend_method(self, args, kwargs):
self.add_var(self.held_object.prepend, args, kwargs)
class ConfigurationDataHolder(MutableInterpreterObject, ObjectHolder):
def __init__(self, pv):
MutableInterpreterObject.__init__(self)
self.used = False # These objects become immutable after use in configure_file.
ObjectHolder.__init__(self, build.ConfigurationData(), pv)
self.methods.update({'set': self.set_method,
'set10': self.set10_method,
'set_quoted': self.set_quoted_method,
'has': self.has_method,
'get': self.get_method,
'get_unquoted': self.get_unquoted_method,
'merge_from': self.merge_from_method,
})
def is_used(self):
return self.used
def mark_used(self):
self.used = True
def validate_args(self, args, kwargs):
if len(args) == 1 and isinstance(args[0], list) and len(args[0]) == 2:
mlog.deprecation('Passing a list as the single argument to '
'configuration_data.set is deprecated. This will '
'become a hard error in the future.')
args = args[0]
if len(args) != 2:
raise InterpreterException("Configuration set requires 2 arguments.")
if self.used:
raise InterpreterException("Can not set values on configuration object that has been used.")
name = args[0]
val = args[1]
if not isinstance(val, (int, str)):
msg = 'Setting a configuration data value to {!r} is invalid, ' \
'and will fail at configure_file(). If you are using it ' \
'just to store some values, please use a dict instead.'
mlog.deprecation(msg.format(val))
desc = kwargs.get('description', None)
if not isinstance(name, str):
raise InterpreterException("First argument to set must be a string.")
if desc is not None and not isinstance(desc, str):
raise InterpreterException('Description must be a string.')
return name, val, desc
@noArgsFlattening
def set_method(self, args, kwargs):
(name, val, desc) = self.validate_args(args, kwargs)
self.held_object.values[name] = (val, desc)
def set_quoted_method(self, args, kwargs):
(name, val, desc) = self.validate_args(args, kwargs)
if not isinstance(val, str):
raise InterpreterException("Second argument to set_quoted must be a string.")
escaped_val = '\\"'.join(val.split('"'))
self.held_object.values[name] = ('"' + escaped_val + '"', desc)
def set10_method(self, args, kwargs):
(name, val, desc) = self.validate_args(args, kwargs)
if val:
self.held_object.values[name] = (1, desc)
else:
self.held_object.values[name] = (0, desc)
def has_method(self, args, kwargs):
return args[0] in self.held_object.values
@FeatureNew('configuration_data.get()', '0.38.0')
@noArgsFlattening
def get_method(self, args, kwargs):
if len(args) < 1 or len(args) > 2:
raise InterpreterException('Get method takes one or two arguments.')
name = args[0]
if name in self.held_object:
return self.held_object.get(name)[0]
if len(args) > 1:
return args[1]
raise InterpreterException('Entry %s not in configuration data.' % name)
@FeatureNew('configuration_data.get_unquoted()', '0.44.0')
def get_unquoted_method(self, args, kwargs):
if len(args) < 1 or len(args) > 2:
raise InterpreterException('Get method takes one or two arguments.')
name = args[0]
if name in self.held_object:
val = self.held_object.get(name)[0]
elif len(args) > 1:
val = args[1]
else:
raise InterpreterException('Entry %s not in configuration data.' % name)
if val[0] == '"' and val[-1] == '"':
return val[1:-1]
return val
def get(self, name):
return self.held_object.values[name] # (val, desc)
def keys(self):
return self.held_object.values.keys()
def merge_from_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Merge_from takes one positional argument.')
from_object = args[0]
if not isinstance(from_object, ConfigurationDataHolder):
raise InterpreterException('Merge_from argument must be a configuration data object.')
from_object = from_object.held_object
for k, v in from_object.values.items():
self.held_object.values[k] = v
# Interpreter objects can not be pickled so we must have
# these wrappers.
class DependencyHolder(InterpreterObject, ObjectHolder):
def __init__(self, dep, pv):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, dep, pv)
self.methods.update({'found': self.found_method,
'type_name': self.type_name_method,
'version': self.version_method,
'name': self.name_method,
'get_pkgconfig_variable': self.pkgconfig_method,
'get_configtool_variable': self.configtool_method,
'partial_dependency': self.partial_dependency_method,
})
def found(self):
return self.found_method([], {})
@noPosargs
@permittedKwargs({})
def type_name_method(self, args, kwargs):
return self.held_object.type_name
@noPosargs
@permittedKwargs({})
def found_method(self, args, kwargs):
if self.held_object.type_name == 'internal':
return True
return self.held_object.found()
@noPosargs
@permittedKwargs({})
def version_method(self, args, kwargs):
return self.held_object.get_version()
@noPosargs
@permittedKwargs({})
def name_method(self, args, kwargs):
return self.held_object.get_name()
@permittedKwargs({'define_variable', 'default'})
def pkgconfig_method(self, args, kwargs):
args = listify(args)
if len(args) != 1:
raise InterpreterException('get_pkgconfig_variable takes exactly one argument.')
varname = args[0]
if not isinstance(varname, str):
raise InterpreterException('Variable name must be a string.')
return self.held_object.get_pkgconfig_variable(varname, kwargs)
@FeatureNew('dep.get_configtool_variable', '0.44.0')
@permittedKwargs({})
def configtool_method(self, args, kwargs):
args = listify(args)
if len(args) != 1:
raise InterpreterException('get_configtool_variable takes exactly one argument.')
varname = args[0]
if not isinstance(varname, str):
raise InterpreterException('Variable name must be a string.')
return self.held_object.get_configtool_variable(varname)
@FeatureNew('dep.partial_dependency', '0.46.0')
@noPosargs
@permittedKwargs(permitted_method_kwargs['partial_dependency'])
def partial_dependency_method(self, args, kwargs):
pdep = self.held_object.get_partial_dependency(**kwargs)
return DependencyHolder(pdep, self.subproject)
class InternalDependencyHolder(InterpreterObject, ObjectHolder):
def __init__(self, dep, pv):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, dep, pv)
self.methods.update({'found': self.found_method,
'version': self.version_method,
'partial_dependency': self.partial_dependency_method,
})
@noPosargs
@permittedKwargs({})
def found_method(self, args, kwargs):
return True
@noPosargs
@permittedKwargs({})
def version_method(self, args, kwargs):
return self.held_object.get_version()
@FeatureNew('dep.partial_dependency', '0.46.0')
@noPosargs
@permittedKwargs(permitted_method_kwargs['partial_dependency'])
def partial_dependency_method(self, args, kwargs):
pdep = self.held_object.get_partial_dependency(**kwargs)
return DependencyHolder(pdep, self.subproject)
class ExternalProgramHolder(InterpreterObject, ObjectHolder):
def __init__(self, ep):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, ep)
self.methods.update({'found': self.found_method,
'path': self.path_method})
@noPosargs
@permittedKwargs({})
def found_method(self, args, kwargs):
return self.found()
@noPosargs
@permittedKwargs({})
def path_method(self, args, kwargs):
return self.held_object.get_path()
def found(self):
return isinstance(self.held_object, build.Executable) or self.held_object.found()
def get_command(self):
return self.held_object.get_command()
def get_name(self):
return self.held_object.get_name()
class ExternalLibraryHolder(InterpreterObject, ObjectHolder):
def __init__(self, el, pv):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, el, pv)
self.methods.update({'found': self.found_method,
'type_name': self.type_name_method,
'partial_dependency': self.partial_dependency_method,
})
def found(self):
return self.held_object.found()
@noPosargs
@permittedKwargs({})
def type_name_method(self, args, kwargs):
return self.held_object.type_name
@noPosargs
@permittedKwargs({})
def found_method(self, args, kwargs):
return self.found()
def get_name(self):
return self.held_object.name
def get_compile_args(self):
return self.held_object.get_compile_args()
def get_link_args(self):
return self.held_object.get_link_args()
def get_exe_args(self):
return self.held_object.get_exe_args()
@FeatureNew('dep.partial_dependency', '0.46.0')
@noPosargs
@permittedKwargs(permitted_method_kwargs['partial_dependency'])
def partial_dependency_method(self, args, kwargs):
pdep = self.held_object.get_partial_dependency(**kwargs)
return DependencyHolder(pdep, self.subproject)
class GeneratorHolder(InterpreterObject, ObjectHolder):
@FeatureNewKwargs('generator', '0.43.0', ['capture'])
def __init__(self, interp, args, kwargs):
self.interpreter = interp
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, build.Generator(args, kwargs), interp.subproject)
self.methods.update({'process': self.process_method})
@FeatureNewKwargs('generator.process', '0.45.0', ['preserve_path_from'])
@permittedKwargs({'extra_args', 'preserve_path_from'})
def process_method(self, args, kwargs):
extras = mesonlib.stringlistify(kwargs.get('extra_args', []))
if 'preserve_path_from' in kwargs:
preserve_path_from = kwargs['preserve_path_from']
if not isinstance(preserve_path_from, str):
raise InvalidArguments('Preserve_path_from must be a string.')
preserve_path_from = os.path.normpath(preserve_path_from)
if not os.path.isabs(preserve_path_from):
# This is a bit of a hack. Fix properly before merging.
raise InvalidArguments('Preserve_path_from must be an absolute path for now. Sorry.')
else:
preserve_path_from = None
gl = self.held_object.process_files('Generator', args, self.interpreter,
preserve_path_from, extra_args=extras)
return GeneratedListHolder(gl)
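# Illustrative only: GeneratorHolder/GeneratedListHolder back generator() usage such as
# ('prog' and the file names are placeholders; preserve_path_from must be absolute, per above):
#   gen = generator(prog, output: '@BASENAME@.c', arguments: ['@INPUT@', '@OUTPUT@'])
#   gen_src = gen.process('data/table.in', preserve_path_from: meson.current_source_dir())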
class GeneratedListHolder(InterpreterObject, ObjectHolder):
def __init__(self, arg1, extra_args=[]):
InterpreterObject.__init__(self)
if isinstance(arg1, GeneratorHolder):
ObjectHolder.__init__(self, build.GeneratedList(arg1.held_object, extra_args))
else:
ObjectHolder.__init__(self, arg1)
def __repr__(self):
r = '<{}: {!r}>'
return r.format(self.__class__.__name__, self.held_object.get_outputs())
def add_file(self, a):
self.held_object.add_file(a)
# A machine that's statically known from the cross file
class MachineHolder(InterpreterObject, ObjectHolder):
def __init__(self, machine_info):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, machine_info)
self.methods.update({'system': self.system_method,
'cpu': self.cpu_method,
'cpu_family': self.cpu_family_method,
'endian': self.endian_method,
})
@noPosargs
@permittedKwargs({})
def cpu_family_method(self, args, kwargs):
return self.held_object.cpu_family
@noPosargs
@permittedKwargs({})
def cpu_method(self, args, kwargs):
return self.held_object.cpu
@noPosargs
@permittedKwargs({})
def system_method(self, args, kwargs):
return self.held_object.system
@noPosargs
@permittedKwargs({})
def endian_method(self, args, kwargs):
return self.held_object.endian
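# Illustrative only: MachineHolder backs the build_machine/host_machine/target_machine
# objects, e.g. (the define is a placeholder):
#   if host_machine.system() == 'windows'
#     add_project_arguments('-DON_WINDOWS', language: 'c')
#   endif
#   message('building for ' + host_machine.cpu_family())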
class IncludeDirsHolder(InterpreterObject, ObjectHolder):
def __init__(self, idobj):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, idobj)
class Headers(InterpreterObject):
def __init__(self, sources, kwargs):
InterpreterObject.__init__(self)
self.sources = sources
self.install_subdir = kwargs.get('subdir', '')
self.custom_install_dir = kwargs.get('install_dir', None)
self.custom_install_mode = kwargs.get('install_mode', None)
if self.custom_install_dir is not None:
if not isinstance(self.custom_install_dir, str):
raise InterpreterException('Custom_install_dir must be a string.')
def set_install_subdir(self, subdir):
self.install_subdir = subdir
def get_install_subdir(self):
return self.install_subdir
def get_sources(self):
return self.sources
def get_custom_install_dir(self):
return self.custom_install_dir
def get_custom_install_mode(self):
return self.custom_install_mode
class DataHolder(InterpreterObject, ObjectHolder):
def __init__(self, data):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, data)
def get_source_subdir(self):
return self.held_object.source_subdir
def get_sources(self):
return self.held_object.sources
def get_install_dir(self):
return self.held_object.install_dir
class InstallDir(InterpreterObject):
def __init__(self, src_subdir, inst_subdir, install_dir, install_mode, exclude, strip_directory):
InterpreterObject.__init__(self)
self.source_subdir = src_subdir
self.installable_subdir = inst_subdir
self.install_dir = install_dir
self.install_mode = install_mode
self.exclude = exclude
self.strip_directory = strip_directory
class Man(InterpreterObject):
def __init__(self, sources, kwargs):
InterpreterObject.__init__(self)
self.sources = sources
self.validate_sources()
self.custom_install_dir = kwargs.get('install_dir', None)
self.custom_install_mode = kwargs.get('install_mode', None)
if self.custom_install_dir is not None and not isinstance(self.custom_install_dir, str):
raise InterpreterException('Custom_install_dir must be a string.')
def validate_sources(self):
for s in self.sources:
try:
num = int(s.split('.')[-1])
except (IndexError, ValueError):
num = 0
if num < 1 or num > 8:
                raise InvalidArguments('Man file must have a numeric file extension between 1 and 8')
def get_custom_install_dir(self):
return self.custom_install_dir
def get_custom_install_mode(self):
return self.custom_install_mode
def get_sources(self):
return self.sources
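# Illustrative only: Man objects come from install_man(); the numeric extension (1-8) is
# validated above. Example (the path is a placeholder):
#   install_man('docs/mytool.1')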
class GeneratedObjectsHolder(InterpreterObject, ObjectHolder):
def __init__(self, held_object):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, held_object)
class TargetHolder(InterpreterObject, ObjectHolder):
def __init__(self, target, interp):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, target, interp.subproject)
self.interpreter = interp
class BuildTargetHolder(TargetHolder):
def __init__(self, target, interp):
super().__init__(target, interp)
self.methods.update({'extract_objects': self.extract_objects_method,
'extract_all_objects': self.extract_all_objects_method,
'get_id': self.get_id_method,
'outdir': self.outdir_method,
'full_path': self.full_path_method,
'private_dir_include': self.private_dir_include_method,
})
def __repr__(self):
r = '<{} {}: {}>'
h = self.held_object
return r.format(self.__class__.__name__, h.get_id(), h.filename)
def is_cross(self):
return self.held_object.is_cross()
@noPosargs
@permittedKwargs({})
def private_dir_include_method(self, args, kwargs):
return IncludeDirsHolder(build.IncludeDirs('', [], False,
[self.interpreter.backend.get_target_private_dir(self.held_object)]))
@noPosargs
@permittedKwargs({})
def full_path_method(self, args, kwargs):
return self.interpreter.backend.get_target_filename_abs(self.held_object)
@noPosargs
@permittedKwargs({})
def outdir_method(self, args, kwargs):
return self.interpreter.backend.get_target_dir(self.held_object)
@permittedKwargs({})
def extract_objects_method(self, args, kwargs):
gobjs = self.held_object.extract_objects(args)
return GeneratedObjectsHolder(gobjs)
@FeatureNewKwargs('extract_all_objects', '0.46.0', ['recursive'])
@noPosargs
@permittedKwargs({'recursive'})
def extract_all_objects_method(self, args, kwargs):
recursive = kwargs.get('recursive', False)
gobjs = self.held_object.extract_all_objects(recursive)
if gobjs.objlist and 'recursive' not in kwargs:
mlog.warning('extract_all_objects called without setting recursive '
'keyword argument. Meson currently defaults to '
'non-recursive to maintain backward compatibility but '
'the default will be changed in the future.')
return GeneratedObjectsHolder(gobjs)
@noPosargs
@permittedKwargs({})
def get_id_method(self, args, kwargs):
return self.held_object.get_id()
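# Illustrative only: BuildTargetHolder methods as used from meson.build
# (target and source names are placeholders):
#   exe = executable('demo', 'main.c')
#   message('will be built at ' + exe.full_path())
#   objs = exe.extract_all_objects(recursive: true)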
class ExecutableHolder(BuildTargetHolder):
def __init__(self, target, interp):
super().__init__(target, interp)
class StaticLibraryHolder(BuildTargetHolder):
def __init__(self, target, interp):
super().__init__(target, interp)
class SharedLibraryHolder(BuildTargetHolder):
def __init__(self, target, interp):
super().__init__(target, interp)
# Set to True only when called from self.func_shared_lib().
target.shared_library_only = False
class BothLibrariesHolder(BuildTargetHolder):
def __init__(self, shared_holder, static_holder, interp):
# FIXME: This build target always represents the shared library, but
# that should be configurable.
super().__init__(shared_holder.held_object, interp)
self.shared_holder = shared_holder
self.static_holder = static_holder
self.methods.update({'get_shared_lib': self.get_shared_lib_method,
'get_static_lib': self.get_static_lib_method,
})
def __repr__(self):
r = '<{} {}: {}, {}: {}>'
h1 = self.shared_holder.held_object
h2 = self.static_holder.held_object
return r.format(self.__class__.__name__, h1.get_id(), h1.filename, h2.get_id(), h2.filename)
@noPosargs
@permittedKwargs({})
def get_shared_lib_method(self, args, kwargs):
return self.shared_holder
@noPosargs
@permittedKwargs({})
def get_static_lib_method(self, args, kwargs):
return self.static_holder
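# Illustrative only: BothLibrariesHolder backs both_libraries(), e.g. (names are placeholders):
#   foo = both_libraries('foo', 'foo.c')
#   foo_static_dep = declare_dependency(link_with: foo.get_static_lib())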
class SharedModuleHolder(BuildTargetHolder):
def __init__(self, target, interp):
super().__init__(target, interp)
class JarHolder(BuildTargetHolder):
def __init__(self, target, interp):
super().__init__(target, interp)
class CustomTargetIndexHolder(InterpreterObject, ObjectHolder):
def __init__(self, object_to_hold):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, object_to_hold)
class CustomTargetHolder(TargetHolder):
def __init__(self, target, interp):
super().__init__(target, interp)
self.methods.update({'full_path': self.full_path_method,
})
def __repr__(self):
r = '<{} {}: {}>'
h = self.held_object
return r.format(self.__class__.__name__, h.get_id(), h.command)
@noPosargs
@permittedKwargs({})
def full_path_method(self, args, kwargs):
return self.interpreter.backend.get_target_filename_abs(self.held_object)
def __getitem__(self, index):
return CustomTargetIndexHolder(self.held_object[index])
def __setitem__(self, index, value):
raise InterpreterException('Cannot set a member of a CustomTarget')
def __delitem__(self, index):
raise InterpreterException('Cannot delete a member of a CustomTarget')
def outdir_include(self):
return IncludeDirsHolder(build.IncludeDirs('', [], False,
[os.path.join('@BUILD_ROOT@', self.interpreter.backend.get_target_dir(self.held_object))]))
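# Illustrative only: CustomTargetHolder backs custom_target() results; indexing yields a
# CustomTargetIndexHolder. Example (name, program and output are placeholders):
#   gen = custom_target('gen-header',
#     output: 'generated.h',
#     command: [gen_prog, '@OUTPUT@'])
#   message(gen.full_path())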
class RunTargetHolder(InterpreterObject, ObjectHolder):
def __init__(self, name, command, args, dependencies, subdir, subproject):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, build.RunTarget(name, command, args, dependencies, subdir, subproject))
def __repr__(self):
r = '<{} {}: {}>'
h = self.held_object
return r.format(self.__class__.__name__, h.get_id(), h.command)
class Test(InterpreterObject):
def __init__(self, name, project, suite, exe, depends, is_parallel,
cmd_args, env, should_fail, timeout, workdir):
InterpreterObject.__init__(self)
self.name = name
self.suite = suite
self.project_name = project
self.exe = exe
self.depends = depends
self.is_parallel = is_parallel
self.cmd_args = cmd_args
self.env = env
self.should_fail = should_fail
self.timeout = timeout
self.workdir = workdir
def get_exe(self):
return self.exe
def get_name(self):
return self.name
class SubprojectHolder(InterpreterObject, ObjectHolder):
def __init__(self, subinterpreter, subproject_dir, name):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, subinterpreter)
self.name = name
self.subproject_dir = subproject_dir
self.methods.update({'get_variable': self.get_variable_method,
'found': self.found_method,
})
@noPosargs
@permittedKwargs({})
def found_method(self, args, kwargs):
return self.found()
def found(self):
return self.held_object is not None
@permittedKwargs({})
def get_variable_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Get_variable takes one argument.')
if not self.found():
            raise InterpreterException('Subproject "%s/%s" is disabled; cannot call get_variable on it.' % (
                self.subproject_dir, self.name))
varname = args[0]
if not isinstance(varname, str):
raise InterpreterException('Get_variable takes a string argument.')
if varname not in self.held_object.variables:
raise InvalidArguments('Requested variable "{0}" not found.'.format(varname))
return self.held_object.variables[varname]
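# Illustrative only: SubprojectHolder backs subproject() results (names are placeholders):
#   sub = subproject('mylib', required: false)
#   if sub.found()
#     mylib_dep = sub.get_variable('mylib_dep')
#   endif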
class CompilerHolder(InterpreterObject):
def __init__(self, compiler, env, subproject):
InterpreterObject.__init__(self)
self.compiler = compiler
self.environment = env
self.subproject = subproject
self.methods.update({'compiles': self.compiles_method,
'links': self.links_method,
'get_id': self.get_id_method,
'compute_int': self.compute_int_method,
'sizeof': self.sizeof_method,
'get_define': self.get_define_method,
'check_header': self.check_header_method,
'has_header': self.has_header_method,
'has_header_symbol': self.has_header_symbol_method,
'run': self.run_method,
'has_function': self.has_function_method,
'has_member': self.has_member_method,
'has_members': self.has_members_method,
'has_type': self.has_type_method,
'alignment': self.alignment_method,
'version': self.version_method,
'cmd_array': self.cmd_array_method,
'find_library': self.find_library_method,
'has_argument': self.has_argument_method,
'has_function_attribute': self.has_func_attribute_method,
'get_supported_function_attributes': self.get_supported_function_attributes_method,
'has_multi_arguments': self.has_multi_arguments_method,
'get_supported_arguments': self.get_supported_arguments_method,
'first_supported_argument': self.first_supported_argument_method,
'has_link_argument': self.has_link_argument_method,
'has_multi_link_arguments': self.has_multi_link_arguments_method,
'get_supported_link_arguments': self.get_supported_link_arguments_method,
'first_supported_link_argument': self.first_supported_link_argument_method,
'unittest_args': self.unittest_args_method,
'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method,
'get_argument_syntax': self.get_argument_syntax_method,
})
def _dep_msg(self, deps, endl):
msg_single = 'with dependency {}'
msg_many = 'with dependencies {}'
if not deps:
return endl
if endl is None:
endl = ''
tpl = msg_many if len(deps) > 1 else msg_single
names = []
for d in deps:
if isinstance(d, dependencies.ExternalLibrary):
name = '-l' + d.name
else:
name = d.name
names.append(name)
return tpl.format(', '.join(names)) + endl
@noPosargs
@permittedKwargs({})
def version_method(self, args, kwargs):
return self.compiler.version
@noPosargs
@permittedKwargs({})
def cmd_array_method(self, args, kwargs):
return self.compiler.exelist
def determine_args(self, kwargs, mode='link'):
nobuiltins = kwargs.get('no_builtin_args', False)
if not isinstance(nobuiltins, bool):
raise InterpreterException('Type of no_builtin_args not a boolean.')
args = []
incdirs = extract_as_list(kwargs, 'include_directories')
for i in incdirs:
if not isinstance(i, IncludeDirsHolder):
raise InterpreterException('Include directories argument must be an include_directories object.')
for idir in i.held_object.get_incdirs():
idir = os.path.join(self.environment.get_source_dir(),
i.held_object.get_curdir(), idir)
args += self.compiler.get_include_args(idir, False)
if not nobuiltins:
opts = self.environment.coredata.compiler_options
args += self.compiler.get_option_compile_args(opts)
if mode == 'link':
args += self.compiler.get_option_link_args(opts)
args += mesonlib.stringlistify(kwargs.get('args', []))
return args
def determine_dependencies(self, kwargs, endl=':'):
deps = kwargs.get('dependencies', None)
if deps is not None:
deps = listify(deps)
final_deps = []
for d in deps:
try:
d = d.held_object
except Exception:
pass
if isinstance(d, InternalDependency) or not isinstance(d, Dependency):
raise InterpreterException('Dependencies must be external dependencies')
final_deps.append(d)
deps = final_deps
return deps, self._dep_msg(deps, endl)
@permittedKwargs({
'prefix',
'args',
'dependencies',
})
def alignment_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Alignment method takes exactly one positional argument.')
check_stringlist(args)
typename = args[0]
prefix = kwargs.get('prefix', '')
if not isinstance(prefix, str):
            raise InterpreterException('Prefix argument of alignment must be a string.')
extra_args = mesonlib.stringlistify(kwargs.get('args', []))
deps, msg = self.determine_dependencies(kwargs)
result = self.compiler.alignment(typename, prefix, self.environment,
extra_args=extra_args,
dependencies=deps)
mlog.log('Checking for alignment of', mlog.bold(typename, True), msg, result)
return result
@permittedKwargs({
'name',
'no_builtin_args',
'include_directories',
'args',
'dependencies',
})
def run_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Run method takes exactly one positional argument.')
code = args[0]
if isinstance(code, mesonlib.File):
code = mesonlib.File.from_absolute_file(
code.rel_to_builddir(self.environment.source_dir))
elif not isinstance(code, str):
raise InvalidArguments('Argument must be string or file.')
testname = kwargs.get('name', '')
if not isinstance(testname, str):
raise InterpreterException('Testname argument must be a string.')
extra_args = functools.partial(self.determine_args, kwargs)
deps, msg = self.determine_dependencies(kwargs, endl=None)
result = self.compiler.run(code, self.environment, extra_args=extra_args,
dependencies=deps)
if len(testname) > 0:
if not result.compiled:
h = mlog.red('DID NOT COMPILE')
elif result.returncode == 0:
h = mlog.green('YES')
else:
h = mlog.red('NO (%d)' % result.returncode)
mlog.log('Checking if', mlog.bold(testname, True), msg, 'runs:', h)
return TryRunResultHolder(result)
@noPosargs
@permittedKwargs({})
def get_id_method(self, args, kwargs):
return self.compiler.get_id()
@noPosargs
@permittedKwargs({})
def symbols_have_underscore_prefix_method(self, args, kwargs):
'''
Check if the compiler prefixes _ (underscore) to global C symbols
See: https://en.wikipedia.org/wiki/Name_mangling#C
'''
return self.compiler.symbols_have_underscore_prefix(self.environment)
@noPosargs
@permittedKwargs({})
def unittest_args_method(self, args, kwargs):
'''
This function is deprecated and should not be used.
It can be removed in a future version of Meson.
'''
if not hasattr(self.compiler, 'get_feature_args'):
raise InterpreterException('This {} compiler has no feature arguments.'.format(self.compiler.get_display_language()))
build_to_src = os.path.relpath(self.environment.get_source_dir(), self.environment.get_build_dir())
return self.compiler.get_feature_args({'unittest': 'true'}, build_to_src)
@permittedKwargs({
'prefix',
'no_builtin_args',
'include_directories',
'args',
'dependencies',
})
def has_member_method(self, args, kwargs):
if len(args) != 2:
raise InterpreterException('Has_member takes exactly two arguments.')
check_stringlist(args)
typename = args[0]
membername = args[1]
prefix = kwargs.get('prefix', '')
if not isinstance(prefix, str):
raise InterpreterException('Prefix argument of has_member must be a string.')
extra_args = functools.partial(self.determine_args, kwargs)
deps, msg = self.determine_dependencies(kwargs)
had = self.compiler.has_members(typename, [membername], prefix,
self.environment,
extra_args=extra_args,
dependencies=deps)
if had:
hadtxt = mlog.green('YES')
else:
hadtxt = mlog.red('NO')
mlog.log('Checking whether type', mlog.bold(typename, True),
'has member', mlog.bold(membername, True), msg, hadtxt)
return had
@permittedKwargs({
'prefix',
'no_builtin_args',
'include_directories',
'args',
'dependencies',
})
def has_members_method(self, args, kwargs):
if len(args) < 2:
raise InterpreterException('Has_members needs at least two arguments.')
check_stringlist(args)
typename = args[0]
membernames = args[1:]
prefix = kwargs.get('prefix', '')
if not isinstance(prefix, str):
raise InterpreterException('Prefix argument of has_members must be a string.')
extra_args = functools.partial(self.determine_args, kwargs)
deps, msg = self.determine_dependencies(kwargs)
had = self.compiler.has_members(typename, membernames, prefix,
self.environment,
extra_args=extra_args,
dependencies=deps)
if had:
hadtxt = mlog.green('YES')
else:
hadtxt = mlog.red('NO')
members = mlog.bold(', '.join(['"{}"'.format(m) for m in membernames]))
mlog.log('Checking whether type', mlog.bold(typename, True),
'has members', members, msg, hadtxt)
return had
@permittedKwargs({
'prefix',
'no_builtin_args',
'include_directories',
'args',
'dependencies',
})
def has_function_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Has_function takes exactly one argument.')
check_stringlist(args)
funcname = args[0]
prefix = kwargs.get('prefix', '')
if not isinstance(prefix, str):
raise InterpreterException('Prefix argument of has_function must be a string.')
extra_args = self.determine_args(kwargs)
deps, msg = self.determine_dependencies(kwargs)
had = self.compiler.has_function(funcname, prefix, self.environment,
extra_args=extra_args,
dependencies=deps)
if had:
hadtxt = mlog.green('YES')
else:
hadtxt = mlog.red('NO')
mlog.log('Checking for function', mlog.bold(funcname, True), msg, hadtxt)
return had
@permittedKwargs({
'prefix',
'no_builtin_args',
'include_directories',
'args',
'dependencies',
})
def has_type_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Has_type takes exactly one argument.')
check_stringlist(args)
typename = args[0]
prefix = kwargs.get('prefix', '')
if not isinstance(prefix, str):
raise InterpreterException('Prefix argument of has_type must be a string.')
extra_args = functools.partial(self.determine_args, kwargs)
deps, msg = self.determine_dependencies(kwargs)
had = self.compiler.has_type(typename, prefix, self.environment,
extra_args=extra_args, dependencies=deps)
if had:
hadtxt = mlog.green('YES')
else:
hadtxt = mlog.red('NO')
mlog.log('Checking for type', mlog.bold(typename, True), msg, hadtxt)
return had
@FeatureNew('compiler.compute_int', '0.40.0')
@permittedKwargs({
'prefix',
'low',
'high',
'guess',
'no_builtin_args',
'include_directories',
'args',
'dependencies',
})
def compute_int_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Compute_int takes exactly one argument.')
check_stringlist(args)
expression = args[0]
prefix = kwargs.get('prefix', '')
low = kwargs.get('low', None)
high = kwargs.get('high', None)
guess = kwargs.get('guess', None)
if not isinstance(prefix, str):
raise InterpreterException('Prefix argument of compute_int must be a string.')
if low is not None and not isinstance(low, int):
raise InterpreterException('Low argument of compute_int must be an int.')
if high is not None and not isinstance(high, int):
raise InterpreterException('High argument of compute_int must be an int.')
if guess is not None and not isinstance(guess, int):
raise InterpreterException('Guess argument of compute_int must be an int.')
extra_args = functools.partial(self.determine_args, kwargs)
deps, msg = self.determine_dependencies(kwargs)
res = self.compiler.compute_int(expression, low, high, guess, prefix,
self.environment, extra_args=extra_args,
dependencies=deps)
mlog.log('Computing int of', mlog.bold(expression, True), msg, res)
return res
@permittedKwargs({
'prefix',
'no_builtin_args',
'include_directories',
'args',
'dependencies',
})
def sizeof_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Sizeof takes exactly one argument.')
check_stringlist(args)
element = args[0]
prefix = kwargs.get('prefix', '')
if not isinstance(prefix, str):
raise InterpreterException('Prefix argument of sizeof must be a string.')
extra_args = functools.partial(self.determine_args, kwargs)
deps, msg = self.determine_dependencies(kwargs)
esize = self.compiler.sizeof(element, prefix, self.environment,
extra_args=extra_args, dependencies=deps)
mlog.log('Checking for size of', mlog.bold(element, True), msg, esize)
return esize
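    # Illustrative only: typical use of the size/int probes above from meson.build
    # ('cc' stands for a compiler object obtained via meson.get_compiler('c');
    # the probed names are placeholders):
    #   cc = meson.get_compiler('c')
    #   int_size = cc.sizeof('int')
    #   off_size = cc.sizeof('off_t', prefix: '#include <sys/types.h>')
    #   nsigs = cc.compute_int('NSIG', prefix: '#include <signal.h>', high: 1024)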
@FeatureNew('compiler.get_define', '0.40.0')
@permittedKwargs({
'prefix',
'no_builtin_args',
'include_directories',
'args',
'dependencies',
})
def get_define_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('get_define() takes exactly one argument.')
check_stringlist(args)
element = args[0]
prefix = kwargs.get('prefix', '')
if not isinstance(prefix, str):
raise InterpreterException('Prefix argument of get_define() must be a string.')
extra_args = functools.partial(self.determine_args, kwargs)
deps, msg = self.determine_dependencies(kwargs)
value = self.compiler.get_define(element, prefix, self.environment,
extra_args=extra_args,
dependencies=deps)
mlog.log('Fetching value of define', mlog.bold(element, True), msg, value)
return value
@permittedKwargs({
'name',
'no_builtin_args',
'include_directories',
'args',
'dependencies',
})
def compiles_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('compiles method takes exactly one argument.')
code = args[0]
if isinstance(code, mesonlib.File):
code = mesonlib.File.from_absolute_file(
code.rel_to_builddir(self.environment.source_dir))
elif not isinstance(code, str):
raise InvalidArguments('Argument must be string or file.')
testname = kwargs.get('name', '')
if not isinstance(testname, str):
raise InterpreterException('Testname argument must be a string.')
extra_args = functools.partial(self.determine_args, kwargs)
deps, msg = self.determine_dependencies(kwargs, endl=None)
result = self.compiler.compiles(code, self.environment,
extra_args=extra_args,
dependencies=deps)
if len(testname) > 0:
if result:
h = mlog.green('YES')
else:
h = mlog.red('NO')
mlog.log('Checking if', mlog.bold(testname, True), msg, 'compiles:', h)
return result
@permittedKwargs({
'name',
'no_builtin_args',
'include_directories',
'args',
'dependencies',
})
def links_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('links method takes exactly one argument.')
code = args[0]
if isinstance(code, mesonlib.File):
code = mesonlib.File.from_absolute_file(
code.rel_to_builddir(self.environment.source_dir))
elif not isinstance(code, str):
raise InvalidArguments('Argument must be string or file.')
testname = kwargs.get('name', '')
if not isinstance(testname, str):
raise InterpreterException('Testname argument must be a string.')
extra_args = functools.partial(self.determine_args, kwargs)
deps, msg = self.determine_dependencies(kwargs, endl=None)
result = self.compiler.links(code, self.environment,
extra_args=extra_args,
dependencies=deps)
if len(testname) > 0:
if result:
h = mlog.green('YES')
else:
h = mlog.red('NO')
mlog.log('Checking if', mlog.bold(testname, True), msg, 'links:', h)
return result
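    # Illustrative only: compiles()/links() checks as written in meson.build
    # (the code fragment and the check name are placeholders):
    #   if cc.links('int main(void) { return 0; }', name: 'basic link check')
    #     message('toolchain can link a trivial program')
    #   endif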
@FeatureNew('compiler.check_header', '0.47.0')
@permittedKwargs({
'prefix',
'no_builtin_args',
'include_directories',
'args',
'dependencies',
})
def check_header_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('check_header method takes exactly one argument.')
check_stringlist(args)
hname = args[0]
prefix = kwargs.get('prefix', '')
if not isinstance(prefix, str):
            raise InterpreterException('Prefix argument of check_header must be a string.')
extra_args = functools.partial(self.determine_args, kwargs)
deps, msg = self.determine_dependencies(kwargs)
haz = self.compiler.check_header(hname, prefix, self.environment,
extra_args=extra_args,
dependencies=deps)
if haz:
h = mlog.green('YES')
else:
h = mlog.red('NO')
mlog.log('Check usable header', mlog.bold(hname, True), msg, h)
return haz
@permittedKwargs({
'prefix',
'no_builtin_args',
'include_directories',
'args',
'dependencies',
})
def has_header_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('has_header method takes exactly one argument.')
check_stringlist(args)
hname = args[0]
prefix = kwargs.get('prefix', '')
if not isinstance(prefix, str):
raise InterpreterException('Prefix argument of has_header must be a string.')
extra_args = functools.partial(self.determine_args, kwargs)
deps, msg = self.determine_dependencies(kwargs)
haz = self.compiler.has_header(hname, prefix, self.environment,
extra_args=extra_args, dependencies=deps)
if haz:
h = mlog.green('YES')
else:
h = mlog.red('NO')
mlog.log('Has header', mlog.bold(hname, True), msg, h)
return haz
@permittedKwargs({
'prefix',
'no_builtin_args',
'include_directories',
'args',
'dependencies',
})
def has_header_symbol_method(self, args, kwargs):
if len(args) != 2:
raise InterpreterException('has_header_symbol method takes exactly two arguments.')
check_stringlist(args)
hname = args[0]
symbol = args[1]
prefix = kwargs.get('prefix', '')
if not isinstance(prefix, str):
raise InterpreterException('Prefix argument of has_header_symbol must be a string.')
extra_args = functools.partial(self.determine_args, kwargs)
deps, msg = self.determine_dependencies(kwargs)
haz = self.compiler.has_header_symbol(hname, symbol, prefix, self.environment,
extra_args=extra_args,
dependencies=deps)
if haz:
h = mlog.green('YES')
else:
h = mlog.red('NO')
mlog.log('Header <{0}> has symbol'.format(hname), mlog.bold(symbol, True), msg, h)
return haz
@FeatureNewKwargs('compiler.find_library', '0.49.0', ['disabler'])
@disablerIfNotFound
@permittedKwargs({
'required',
'dirs',
})
def find_library_method(self, args, kwargs):
# TODO add dependencies support?
if len(args) != 1:
raise InterpreterException('find_library method takes one argument.')
libname = args[0]
if not isinstance(libname, str):
raise InterpreterException('Library name not a string.')
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
mlog.log('Library', mlog.bold(libname), 'skipped: feature', mlog.bold(feature), 'disabled')
lib = dependencies.ExternalLibrary(libname, None,
self.environment,
self.compiler.language,
silent=True)
return ExternalLibraryHolder(lib, self.subproject)
search_dirs = mesonlib.stringlistify(kwargs.get('dirs', []))
for i in search_dirs:
if not os.path.isabs(i):
raise InvalidCode('Search directory %s is not an absolute path.' % i)
linkargs = self.compiler.find_library(libname, self.environment, search_dirs)
if required and not linkargs:
raise InterpreterException('{} library {!r} not found'.format(self.compiler.get_display_language(), libname))
lib = dependencies.ExternalLibrary(libname, linkargs, self.environment,
self.compiler.language)
return ExternalLibraryHolder(lib, self.subproject)
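    # Illustrative only: find_library() as used from meson.build (the library name is a placeholder):
    #   m_dep = cc.find_library('m', required: false)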
@permittedKwargs({})
def has_argument_method(self, args, kwargs):
args = mesonlib.stringlistify(args)
if len(args) != 1:
raise InterpreterException('has_argument takes exactly one argument.')
return self.has_multi_arguments_method(args, kwargs)
@permittedKwargs({})
def has_multi_arguments_method(self, args, kwargs):
args = mesonlib.stringlistify(args)
result = self.compiler.has_multi_arguments(args, self.environment)
if result:
h = mlog.green('YES')
else:
h = mlog.red('NO')
mlog.log(
'Compiler for {} supports arguments {}:'.format(
self.compiler.get_display_language(), ' '.join(args)),
h)
return result
@FeatureNew('compiler.get_supported_arguments', '0.43.0')
@permittedKwargs({})
def get_supported_arguments_method(self, args, kwargs):
args = mesonlib.stringlistify(args)
supported_args = []
for arg in args:
if self.has_argument_method(arg, kwargs):
supported_args.append(arg)
return supported_args
@permittedKwargs({})
def first_supported_argument_method(self, args, kwargs):
for i in mesonlib.stringlistify(args):
if self.has_argument_method(i, kwargs):
mlog.log('First supported argument:', mlog.bold(i))
return [i]
mlog.log('First supported argument:', mlog.red('None'))
return []
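    # Illustrative only: argument probing as typically combined in meson.build
    # (the flags are placeholders):
    #   add_project_arguments(cc.get_supported_arguments(['-Wall', '-Wextra']), language: 'c')
    #   std_flag = cc.first_supported_argument(['-std=c17', '-std=c11'])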
@FeatureNew('compiler.has_link_argument', '0.46.0')
@permittedKwargs({})
def has_link_argument_method(self, args, kwargs):
args = mesonlib.stringlistify(args)
if len(args) != 1:
raise InterpreterException('has_link_argument takes exactly one argument.')
return self.has_multi_link_arguments_method(args, kwargs)
    @FeatureNew('compiler.has_multi_link_arguments', '0.46.0')
@permittedKwargs({})
def has_multi_link_arguments_method(self, args, kwargs):
args = mesonlib.stringlistify(args)
result = self.compiler.has_multi_link_arguments(args, self.environment)
if result:
h = mlog.green('YES')
else:
h = mlog.red('NO')
mlog.log(
'Compiler for {} supports link arguments {}:'.format(
self.compiler.get_display_language(), ' '.join(args)),
h)
return result
    @FeatureNew('compiler.get_supported_link_arguments', '0.46.0')
@permittedKwargs({})
def get_supported_link_arguments_method(self, args, kwargs):
args = mesonlib.stringlistify(args)
supported_args = []
for arg in args:
if self.has_link_argument_method(arg, kwargs):
supported_args.append(arg)
return supported_args
    @FeatureNew('compiler.first_supported_link_argument', '0.46.0')
@permittedKwargs({})
def first_supported_link_argument_method(self, args, kwargs):
for i in mesonlib.stringlistify(args):
if self.has_link_argument_method(i, kwargs):
mlog.log('First supported link argument:', mlog.bold(i))
return [i]
mlog.log('First supported link argument:', mlog.red('None'))
return []
@FeatureNew('compiler.has_function_attribute', '0.48.0')
@permittedKwargs({})
def has_func_attribute_method(self, args, kwargs):
args = mesonlib.stringlistify(args)
if len(args) != 1:
raise InterpreterException('has_func_attribute takes exactly one argument.')
result = self.compiler.has_func_attribute(args[0], self.environment)
h = mlog.green('YES') if result else mlog.red('NO')
mlog.log('Compiler for {} supports function attribute {}:'.format(self.compiler.get_display_language(), args[0]), h)
return result
@FeatureNew('compiler.get_supported_function_attributes', '0.48.0')
@permittedKwargs({})
def get_supported_function_attributes_method(self, args, kwargs):
args = mesonlib.stringlistify(args)
return [a for a in args if self.has_func_attribute_method(a, kwargs)]
    @FeatureNew('compiler.get_argument_syntax', '0.49.0')
@noPosargs
@noKwargs
def get_argument_syntax_method(self, args, kwargs):
return self.compiler.get_argument_syntax()
ModuleState = namedtuple('ModuleState', [
'build_to_src', 'subproject', 'subdir', 'current_lineno', 'environment',
'project_name', 'project_version', 'backend', 'compilers', 'targets',
'data', 'headers', 'man', 'global_args', 'project_args', 'build_machine',
'host_machine', 'target_machine'])
class ModuleHolder(InterpreterObject, ObjectHolder):
def __init__(self, modname, module, interpreter):
InterpreterObject.__init__(self)
ObjectHolder.__init__(self, module)
self.modname = modname
self.interpreter = interpreter
def method_call(self, method_name, args, kwargs):
try:
fn = getattr(self.held_object, method_name)
except AttributeError:
raise InvalidArguments('Module %s does not have method %s.' % (self.modname, method_name))
if method_name.startswith('_'):
raise InvalidArguments('Function {!r} in module {!r} is private.'.format(method_name, self.modname))
if not getattr(fn, 'no-args-flattening', False):
args = flatten(args)
# This is not 100% reliable but we can't use hash()
# because the Build object contains dicts and lists.
num_targets = len(self.interpreter.build.targets)
state = ModuleState(
build_to_src=mesonlib.relpath(self.interpreter.environment.get_source_dir(),
self.interpreter.environment.get_build_dir()),
subproject=self.interpreter.subproject,
subdir=self.interpreter.subdir,
current_lineno=self.interpreter.current_lineno,
environment=self.interpreter.environment,
project_name=self.interpreter.build.project_name,
project_version=self.interpreter.build.dep_manifest[self.interpreter.active_projectname],
# The backend object is under-used right now, but we will need it:
# https://github.com/mesonbuild/meson/issues/1419
backend=self.interpreter.backend,
compilers=self.interpreter.build.compilers,
targets=self.interpreter.build.targets,
data=self.interpreter.build.data,
headers=self.interpreter.build.get_headers(),
man=self.interpreter.build.get_man(),
global_args=self.interpreter.build.global_args,
project_args=self.interpreter.build.projects_args.get(self.interpreter.subproject, {}),
build_machine=self.interpreter.builtin['build_machine'].held_object,
host_machine=self.interpreter.builtin['host_machine'].held_object,
target_machine=self.interpreter.builtin['target_machine'].held_object,
)
if self.held_object.is_snippet(method_name):
value = fn(self.interpreter, state, args, kwargs)
return self.interpreter.holderify(value)
else:
value = fn(state, args, kwargs)
if num_targets != len(self.interpreter.build.targets):
raise InterpreterException('Extension module altered internal state illegally.')
return self.interpreter.module_method_callback(value)
class MesonMain(InterpreterObject):
def __init__(self, build, interpreter):
InterpreterObject.__init__(self)
self.build = build
self.interpreter = interpreter
self._found_source_scripts = {}
self.methods.update({'get_compiler': self.get_compiler_method,
'is_cross_build': self.is_cross_build_method,
'has_exe_wrapper': self.has_exe_wrapper_method,
'is_unity': self.is_unity_method,
'is_subproject': self.is_subproject_method,
'current_source_dir': self.current_source_dir_method,
'current_build_dir': self.current_build_dir_method,
'source_root': self.source_root_method,
'build_root': self.build_root_method,
'add_install_script': self.add_install_script_method,
'add_postconf_script': self.add_postconf_script_method,
'add_dist_script': self.add_dist_script_method,
'install_dependency_manifest': self.install_dependency_manifest_method,
'override_find_program': self.override_find_program_method,
'project_version': self.project_version_method,
'project_license': self.project_license_method,
'version': self.version_method,
'project_name': self.project_name_method,
'get_cross_property': self.get_cross_property_method,
'backend': self.backend_method,
})
def _find_source_script(self, name, args):
# Prefer scripts in the current source directory
search_dir = os.path.join(self.interpreter.environment.source_dir,
self.interpreter.subdir)
key = (name, search_dir)
if key in self._found_source_scripts:
found = self._found_source_scripts[key]
else:
found = dependencies.ExternalProgram(name, search_dir=search_dir)
if found.found():
self._found_source_scripts[key] = found
else:
m = 'Script or command {!r} not found or not executable'
raise InterpreterException(m.format(name))
return build.RunScript(found.get_command(), args)
@permittedKwargs({})
def add_install_script_method(self, args, kwargs):
if len(args) < 1:
raise InterpreterException('add_install_script takes one or more arguments')
check_stringlist(args, 'add_install_script args must be strings')
script = self._find_source_script(args[0], args[1:])
self.build.install_scripts.append(script)
@permittedKwargs({})
def add_postconf_script_method(self, args, kwargs):
if len(args) < 1:
raise InterpreterException('add_postconf_script takes one or more arguments')
check_stringlist(args, 'add_postconf_script arguments must be strings')
script = self._find_source_script(args[0], args[1:])
self.build.postconf_scripts.append(script)
@permittedKwargs({})
def add_dist_script_method(self, args, kwargs):
if len(args) < 1:
raise InterpreterException('add_dist_script takes one or more arguments')
if len(args) > 1:
FeatureNew('Calling "add_dist_script" with multiple arguments', '0.49.0').use(self.interpreter.subproject)
check_stringlist(args, 'add_dist_script argument must be a string')
if self.interpreter.subproject != '':
raise InterpreterException('add_dist_script may not be used in a subproject.')
script = self._find_source_script(args[0], args[1:])
self.build.dist_scripts.append(script)
@noPosargs
@permittedKwargs({})
def current_source_dir_method(self, args, kwargs):
src = self.interpreter.environment.source_dir
sub = self.interpreter.subdir
if sub == '':
return src
return os.path.join(src, sub)
@noPosargs
@permittedKwargs({})
def current_build_dir_method(self, args, kwargs):
src = self.interpreter.environment.build_dir
sub = self.interpreter.subdir
if sub == '':
return src
return os.path.join(src, sub)
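    # Illustrative only: these directory helpers are commonly combined like
    # (the file name is a placeholder):
    #   inc = include_directories('.')
    #   conf_path = join_paths(meson.current_build_dir(), 'config.h')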
@noPosargs
@permittedKwargs({})
def backend_method(self, args, kwargs):
return self.interpreter.backend.name
@noPosargs
@permittedKwargs({})
def source_root_method(self, args, kwargs):
return self.interpreter.environment.source_dir
@noPosargs
@permittedKwargs({})
def build_root_method(self, args, kwargs):
return self.interpreter.environment.build_dir
@noPosargs
@permittedKwargs({})
def has_exe_wrapper_method(self, args, kwargs):
if self.is_cross_build_method(None, None) and \
self.build.environment.cross_info.need_exe_wrapper():
if self.build.environment.exe_wrapper is None:
return False
# We return True when exe_wrap is defined, when it's not needed, and
# when we're compiling natively. The last two are semantically confusing.
# Need to revisit this.
return True
@noPosargs
@permittedKwargs({})
def is_cross_build_method(self, args, kwargs):
return self.build.environment.is_cross_build()
@permittedKwargs({'native'})
def get_compiler_method(self, args, kwargs):
if len(args) != 1:
            raise InterpreterException('get_compiler() must have one and only one argument.')
cname = args[0]
native = kwargs.get('native', None)
if native is None:
if self.build.environment.is_cross_build():
native = False
else:
native = True
if not isinstance(native, bool):
raise InterpreterException('Type of "native" must be a boolean.')
if native:
clist = self.build.compilers
else:
clist = self.build.cross_compilers
if cname in clist:
return CompilerHolder(clist[cname], self.build.environment, self.interpreter.subproject)
raise InterpreterException('Tried to access compiler for unspecified language "%s".' % cname)
@noPosargs
@permittedKwargs({})
def is_unity_method(self, args, kwargs):
optval = self.interpreter.environment.coredata.get_builtin_option('unity')
if optval == 'on' or (optval == 'subprojects' and self.interpreter.is_subproject()):
return True
return False
@noPosargs
@permittedKwargs({})
def is_subproject_method(self, args, kwargs):
return self.interpreter.is_subproject()
@permittedKwargs({})
def install_dependency_manifest_method(self, args, kwargs):
if len(args) != 1:
raise InterpreterException('Must specify manifest install file name')
if not isinstance(args[0], str):
raise InterpreterException('Argument must be a string.')
self.build.dep_manifest_name = args[0]
@FeatureNew('meson.override_find_program', '0.46.0')
@permittedKwargs({})
def override_find_program_method(self, args, kwargs):
if len(args) != 2:
raise InterpreterException('Override needs two arguments')
name = args[0]
exe = args[1]
if not isinstance(name, str):
raise InterpreterException('First argument must be a string')
if hasattr(exe, 'held_object'):
exe = exe.held_object
if isinstance(exe, mesonlib.File):
abspath = exe.absolute_path(self.interpreter.environment.source_dir,
self.interpreter.environment.build_dir)
if not os.path.exists(abspath):
raise InterpreterException('Tried to override %s with a file that does not exist.' % name)
exe = dependencies.ExternalProgram(abspath)
if not isinstance(exe, (dependencies.ExternalProgram, build.Executable)):
raise InterpreterException('Second argument must be an external program or executable.')
self.interpreter.add_find_program_override(name, exe)
@noPosargs
@permittedKwargs({})
def project_version_method(self, args, kwargs):
return self.build.dep_manifest[self.interpreter.active_projectname]['version']
@FeatureNew('meson.project_license()', '0.45.0')
@noPosargs
@permittedKwargs({})
def project_license_method(self, args, kwargs):
return self.build.dep_manifest[self.interpreter.active_projectname]['license']
@noPosargs
@permittedKwargs({})
def version_method(self, args, kwargs):
return coredata.version
@noPosargs
@permittedKwargs({})
def project_name_method(self, args, kwargs):
return self.interpreter.active_projectname
@noArgsFlattening
@permittedKwargs({})
def get_cross_property_method(self, args, kwargs):
if len(args) < 1 or len(args) > 2:
raise InterpreterException('Must have one or two arguments.')
propname = args[0]
if not isinstance(propname, str):
raise InterpreterException('Property name must be string.')
try:
props = self.interpreter.environment.cross_info.get_properties()
return props[propname]
except Exception:
if len(args) == 2:
return args[1]
raise InterpreterException('Unknown cross property: %s.' % propname)
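# Illustrative only: meson.get_cross_property() with a fallback, as backed by the method above
# (property name and fallback are placeholders):
#   sys_root = meson.get_cross_property('sys_root', '/')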
known_library_kwargs = (
build.known_shlib_kwargs |
build.known_stlib_kwargs
)
known_build_target_kwargs = (
known_library_kwargs |
build.known_exe_kwargs |
build.known_jar_kwargs |
{'target_type'}
)
permitted_kwargs = {'add_global_arguments': {'language', 'native'},
'add_global_link_arguments': {'language', 'native'},
'add_languages': {'required'},
'add_project_link_arguments': {'language', 'native'},
'add_project_arguments': {'language', 'native'},
'add_test_setup': {'exe_wrapper', 'gdb', 'timeout_multiplier', 'env'},
'benchmark': {'args', 'env', 'should_fail', 'timeout', 'workdir', 'suite'},
'build_target': known_build_target_kwargs,
'configure_file': {'input', 'output', 'configuration', 'command', 'copy', 'install_dir', 'install_mode', 'capture', 'install', 'format', 'output_format', 'encoding'},
'custom_target': {'input', 'output', 'command', 'install', 'install_dir', 'install_mode', 'build_always', 'capture', 'depends', 'depend_files', 'depfile', 'build_by_default', 'build_always_stale', 'console'},
'dependency': {'default_options', 'fallback', 'language', 'main', 'method', 'modules', 'optional_modules', 'native', 'required', 'static', 'version', 'private_headers'},
'declare_dependency': {'include_directories', 'link_with', 'sources', 'dependencies', 'compile_args', 'link_args', 'link_whole', 'version'},
'executable': build.known_exe_kwargs,
'find_program': {'required', 'native'},
'generator': {'arguments', 'output', 'depfile', 'capture', 'preserve_path_from'},
'include_directories': {'is_system'},
'install_data': {'install_dir', 'install_mode', 'rename', 'sources'},
'install_headers': {'install_dir', 'install_mode', 'subdir'},
'install_man': {'install_dir', 'install_mode'},
'install_subdir': {'exclude_files', 'exclude_directories', 'install_dir', 'install_mode', 'strip_directory'},
'jar': build.known_jar_kwargs,
'project': {'version', 'meson_version', 'default_options', 'license', 'subproject_dir'},
'run_command': {'check', 'capture'},
'run_target': {'command', 'depends'},
'shared_library': build.known_shlib_kwargs,
'shared_module': build.known_shmod_kwargs,
'static_library': build.known_stlib_kwargs,
'both_libraries': known_library_kwargs,
'library': known_library_kwargs,
'subdir': {'if_found'},
'subproject': {'version', 'default_options', 'required'},
'test': {'args', 'depends', 'env', 'is_parallel', 'should_fail', 'timeout', 'workdir', 'suite'},
'vcs_tag': {'input', 'output', 'fallback', 'command', 'replace_string'},
}
class Interpreter(InterpreterBase):
def __init__(self, build, backend=None, subproject='', subdir='', subproject_dir='subprojects',
modules = None, default_project_options=None, mock=False):
super().__init__(build.environment.get_source_dir(), subdir)
self.an_unpicklable_object = mesonlib.an_unpicklable_object
self.build = build
self.environment = build.environment
self.coredata = self.environment.get_coredata()
self.backend = backend
self.subproject = subproject
if modules is None:
self.modules = {}
else:
self.modules = modules
# Subproject directory is usually the name of the subproject, but can
# be different for dependencies provided by wrap files.
self.subproject_directory_name = subdir.split(os.path.sep)[-1]
self.subproject_dir = subproject_dir
self.option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
if not mock:
self.load_root_meson_file()
self.sanity_check_ast()
self.builtin.update({'meson': MesonMain(build, self)})
self.generators = []
self.visited_subdirs = {}
self.project_args_frozen = False
self.global_args_frozen = False # implies self.project_args_frozen
self.subprojects = {}
self.subproject_stack = []
self.configure_file_outputs = {}
# Passed from the outside, only used in subprojects.
if default_project_options:
self.default_project_options = default_project_options.copy()
else:
self.default_project_options = {}
self.project_default_options = {}
self.build_func_dict()
# build_def_files needs to be defined before parse_project is called
self.build_def_files = [os.path.join(self.subdir, environment.build_filename)]
if not mock:
self.parse_project()
# Initialize machine descriptions. We can do a better job now because we
# have the compilers needed to gain more knowledge, so wipe out old
        # inference and start over.
self.build.environment.machines.miss_defaulting()
self.build.environment.machines.detect_build(self.coredata.compilers)
self.build.environment.machines.default_missing()
assert self.build.environment.machines.build.cpu is not None
assert self.build.environment.machines.host.cpu is not None
assert self.build.environment.machines.target.cpu is not None
self.builtin['build_machine'] = \
MachineHolder(self.build.environment.machines.build)
self.builtin['host_machine'] = \
MachineHolder(self.build.environment.machines.host)
self.builtin['target_machine'] = \
MachineHolder(self.build.environment.machines.target)
def get_non_matching_default_options(self):
env = self.environment
for def_opt_name, def_opt_value in self.project_default_options.items():
for option_type in [
env.coredata.builtins, env.coredata.compiler_options,
env.coredata.backend_options, env.coredata.base_options,
env.coredata.user_options]:
for cur_opt_name, cur_opt_value in option_type.items():
if (def_opt_name == cur_opt_name and
def_opt_value != cur_opt_value.value):
yield (def_opt_name, def_opt_value, cur_opt_value.value)
def build_func_dict(self):
self.funcs.update({'add_global_arguments': self.func_add_global_arguments,
'add_project_arguments': self.func_add_project_arguments,
'add_global_link_arguments': self.func_add_global_link_arguments,
'add_project_link_arguments': self.func_add_project_link_arguments,
'add_test_setup': self.func_add_test_setup,
'add_languages': self.func_add_languages,
'assert': self.func_assert,
'benchmark': self.func_benchmark,
'build_target': self.func_build_target,
'configuration_data': self.func_configuration_data,
'configure_file': self.func_configure_file,
'custom_target': self.func_custom_target,
'declare_dependency': self.func_declare_dependency,
'dependency': self.func_dependency,
'disabler': self.func_disabler,
'environment': self.func_environment,
'error': self.func_error,
'executable': self.func_executable,
'generator': self.func_generator,
'gettext': self.func_gettext,
'get_option': self.func_get_option,
'get_variable': self.func_get_variable,
'files': self.func_files,
'find_library': self.func_find_library,
'find_program': self.func_find_program,
'include_directories': self.func_include_directories,
'import': self.func_import,
'install_data': self.func_install_data,
'install_headers': self.func_install_headers,
'install_man': self.func_install_man,
'install_subdir': self.func_install_subdir,
'is_variable': self.func_is_variable,
'jar': self.func_jar,
'join_paths': self.func_join_paths,
'library': self.func_library,
'message': self.func_message,
'warning': self.func_warning,
'option': self.func_option,
'project': self.func_project,
'run_target': self.func_run_target,
'run_command': self.func_run_command,
'set_variable': self.func_set_variable,
'subdir': self.func_subdir,
'subdir_done': self.func_subdir_done,
'subproject': self.func_subproject,
'shared_library': self.func_shared_lib,
'shared_module': self.func_shared_module,
'static_library': self.func_static_lib,
'both_libraries': self.func_both_lib,
'test': self.func_test,
'vcs_tag': self.func_vcs_tag
})
if 'MESON_UNIT_TEST' in os.environ:
self.funcs.update({'exception': self.func_exception})
def holderify(self, item):
if isinstance(item, list):
return [self.holderify(x) for x in item]
if isinstance(item, build.CustomTarget):
return CustomTargetHolder(item, self)
elif isinstance(item, (int, str, bool)) or item is None:
return item
elif isinstance(item, build.Executable):
return ExecutableHolder(item, self)
elif isinstance(item, build.GeneratedList):
return GeneratedListHolder(item)
elif isinstance(item, build.RunTarget):
raise RuntimeError('This is not a pipe.')
elif isinstance(item, build.RunScript):
raise RuntimeError('Do not do this.')
elif isinstance(item, build.Data):
return DataHolder(item)
elif isinstance(item, dependencies.InternalDependency):
return InternalDependencyHolder(item, self.subproject)
elif isinstance(item, dependencies.ExternalDependency):
return DependencyHolder(item, self.subproject)
elif isinstance(item, dependencies.ExternalProgram):
return ExternalProgramHolder(item)
elif hasattr(item, 'held_object'):
return item
else:
raise InterpreterException('Module returned a value of unknown type.')
def process_new_values(self, invalues):
invalues = listify(invalues)
for v in invalues:
if isinstance(v, (RunTargetHolder, CustomTargetHolder, BuildTargetHolder)):
v = v.held_object
if isinstance(v, (build.BuildTarget, build.CustomTarget, build.RunTarget)):
self.add_target(v.name, v)
elif isinstance(v, list):
self.module_method_callback(v)
elif isinstance(v, build.GeneratedList):
pass
elif isinstance(v, build.RunScript):
self.build.install_scripts.append(v)
elif isinstance(v, build.Data):
self.build.data.append(v)
elif isinstance(v, dependencies.ExternalProgram):
return ExternalProgramHolder(v)
elif isinstance(v, dependencies.InternalDependency):
# FIXME: This is special cased and not ideal:
# The first source is our new VapiTarget, the rest are deps
self.process_new_values(v.sources[0])
elif hasattr(v, 'held_object'):
pass
elif isinstance(v, (int, str, bool)):
pass
else:
raise InterpreterException('Module returned a value of unknown type.')
def module_method_callback(self, return_object):
if not isinstance(return_object, ModuleReturnValue):
raise InterpreterException('Bug in module, it returned an invalid object')
invalues = return_object.new_objects
self.process_new_values(invalues)
return self.holderify(return_object.return_value)
def get_build_def_files(self):
return self.build_def_files
def get_variables(self):
return self.variables
def check_cross_stdlibs(self):
if self.build.environment.is_cross_build():
cross_info = self.build.environment.cross_info
for l, c in self.build.cross_compilers.items():
try:
di = mesonlib.stringlistify(cross_info.get_stdlib(l))
if len(di) != 2:
raise InterpreterException('Stdlib definition for %s should have exactly two elements.'
% l)
projname, depname = di
subproj = self.do_subproject(projname, {})
self.build.cross_stdlibs[l] = subproj.get_variable_method([depname], {})
except KeyError:
pass
except InvalidArguments:
pass
@stringArgs
@noKwargs
def func_import(self, node, args, kwargs):
if len(args) != 1:
raise InvalidCode('Import takes one argument.')
modname = args[0]
if modname.startswith('unstable-'):
plainname = modname.split('-', 1)[1]
mlog.warning('Module %s has no backwards or forwards compatibility and might not exist in future releases.' % modname, location=node)
modname = 'unstable_' + plainname
if modname not in self.modules:
try:
module = importlib.import_module('mesonbuild.modules.' + modname)
except ImportError:
raise InvalidArguments('Module "%s" does not exist' % (modname, ))
self.modules[modname] = module.initialize(self)
return ModuleHolder(modname, self.modules[modname], self)
@stringArgs
@noKwargs
def func_files(self, node, args, kwargs):
return [mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, fname) for fname in args]
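    # Illustrative only: files() as written in meson.build (the paths are placeholders):
    #   srcs = files('src/main.c', 'src/util.c')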
@FeatureNewKwargs('declare_dependency', '0.46.0', ['link_whole'])
@permittedKwargs(permitted_kwargs['declare_dependency'])
@noPosargs
def func_declare_dependency(self, node, args, kwargs):
version = kwargs.get('version', self.project_version)
if not isinstance(version, str):
raise InterpreterException('Version must be a string.')
incs = extract_as_list(kwargs, 'include_directories', unholder=True)
libs = extract_as_list(kwargs, 'link_with', unholder=True)
libs_whole = extract_as_list(kwargs, 'link_whole', unholder=True)
sources = extract_as_list(kwargs, 'sources')
sources = listify(self.source_strings_to_files(sources), unholder=True)
deps = extract_as_list(kwargs, 'dependencies', unholder=True)
compile_args = mesonlib.stringlistify(kwargs.get('compile_args', []))
link_args = mesonlib.stringlistify(kwargs.get('link_args', []))
final_deps = []
for d in deps:
try:
d = d.held_object
except Exception:
pass
if not isinstance(d, (dependencies.Dependency, dependencies.ExternalLibrary, dependencies.InternalDependency)):
raise InterpreterException('Dependencies must be external deps')
final_deps.append(d)
for l in libs:
if isinstance(l, dependencies.Dependency):
raise InterpreterException('''Entries in "link_with" may only be self-built targets,
external dependencies (including libraries) must go to "dependencies".''')
dep = dependencies.InternalDependency(version, incs, compile_args,
link_args, libs, libs_whole, sources, final_deps)
return DependencyHolder(dep, self.subproject)
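    # Illustrative only: declare_dependency() as written in meson.build
    # ('mylib' and the paths/flags are placeholders):
    #   mylib_dep = declare_dependency(
    #     include_directories: include_directories('include'),
    #     link_with: mylib,
    #     compile_args: ['-DUSE_MYLIB'])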
@noKwargs
def func_assert(self, node, args, kwargs):
if len(args) != 2:
raise InterpreterException('Assert takes exactly two arguments')
value, message = args
if not isinstance(value, bool):
raise InterpreterException('Assert value not bool.')
if not isinstance(message, str):
raise InterpreterException('Assert message not a string.')
if not value:
raise InterpreterException('Assert failed: ' + message)
def validate_arguments(self, args, argcount, arg_types):
if argcount is not None:
if argcount != len(args):
raise InvalidArguments('Expected %d arguments, got %d.' %
(argcount, len(args)))
for i in range(min(len(args), len(arg_types))):
wanted = arg_types[i]
actual = args[i]
if wanted is not None:
if not isinstance(actual, wanted):
raise InvalidArguments('Incorrect argument type.')
@FeatureNewKwargs('run_command', '0.47.0', ['check', 'capture'])
@permittedKwargs(permitted_kwargs['run_command'])
def func_run_command(self, node, args, kwargs):
return self.run_command_impl(node, args, kwargs)
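    # Shared implementation of run_command(): resolve the command (executable,
    # compiler, File or plain string), track scripts from the source tree as
    # build definition files, and execute the command at configure time.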
def run_command_impl(self, node, args, kwargs, in_builddir=False):
if len(args) < 1:
raise InterpreterException('Not enough arguments')
cmd = args[0]
cargs = args[1:]
capture = kwargs.get('capture', True)
srcdir = self.environment.get_source_dir()
builddir = self.environment.get_build_dir()
check = kwargs.get('check', False)
if not isinstance(check, bool):
raise InterpreterException('Check must be boolean.')
m = 'must be a string, or the output of find_program(), files() '\
'or configure_file(), or a compiler object; not {!r}'
if isinstance(cmd, ExternalProgramHolder):
cmd = cmd.held_object
if isinstance(cmd, build.Executable):
progname = node.args.arguments[0].value
msg = 'Program {!r} was overridden with the compiled executable {!r}'\
' and therefore cannot be used during configuration'
raise InterpreterException(msg.format(progname, cmd.description()))
elif isinstance(cmd, CompilerHolder):
cmd = cmd.compiler.get_exelist()[0]
prog = ExternalProgram(cmd, silent=True)
if not prog.found():
raise InterpreterException('Program {!r} not found '
'or not executable'.format(cmd))
cmd = prog
else:
if isinstance(cmd, mesonlib.File):
cmd = cmd.absolute_path(srcdir, builddir)
elif not isinstance(cmd, str):
raise InterpreterException('First argument ' + m.format(cmd))
# Prefer scripts in the current source directory
search_dir = os.path.join(srcdir, self.subdir)
prog = ExternalProgram(cmd, silent=True, search_dir=search_dir)
if not prog.found():
raise InterpreterException('Program or command {!r} not found '
'or not executable'.format(cmd))
cmd = prog
cmd_path = mesonlib.relpath(cmd.get_path(), start=srcdir)
if not cmd_path.startswith('..') and cmd_path not in self.build_def_files:
self.build_def_files.append(cmd_path)
expanded_args = []
for a in listify(cargs):
if isinstance(a, str):
expanded_args.append(a)
elif isinstance(a, mesonlib.File):
expanded_args.append(a.absolute_path(srcdir, builddir))
elif isinstance(a, ExternalProgramHolder):
expanded_args.append(a.held_object.get_path())
else:
raise InterpreterException('Arguments ' + m.format(a))
for a in expanded_args:
if not os.path.isabs(a):
a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a)
if os.path.isfile(a):
a = mesonlib.relpath(a, start=srcdir)
if not a.startswith('..'):
if a not in self.build_def_files:
self.build_def_files.append(a)
return RunProcess(cmd, expanded_args, srcdir, builddir, self.subdir,
self.environment.get_build_command() + ['introspect'],
in_builddir=in_builddir, check=check, capture=capture)
@stringArgs
def func_gettext(self, nodes, args, kwargs):
raise InterpreterException('Gettext() function has been moved to module i18n. Import it and use i18n.gettext() instead')
def func_option(self, nodes, args, kwargs):
raise InterpreterException('Tried to call option() in build description file. All options must be in the option file.')
@FeatureNewKwargs('subproject', '0.38.0', ['default_options'])
@permittedKwargs(permitted_kwargs['subproject'])
@stringArgs
def func_subproject(self, nodes, args, kwargs):
if len(args) != 1:
raise InterpreterException('Subproject takes exactly one argument')
dirname = args[0]
return self.do_subproject(dirname, kwargs)
def disabled_subproject(self, dirname):
self.subprojects[dirname] = SubprojectHolder(None, self.subproject_dir, dirname)
return self.subprojects[dirname]
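    # Configure a subproject: resolve its directory through the wrap system,
    # guard against recursive includes, run a nested Interpreter on it and
    # merge the results back into this build.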
def do_subproject(self, dirname, kwargs):
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
mlog.log('\nSubproject', mlog.bold(dirname), ':', 'skipped: feature', mlog.bold(feature), 'disabled')
return self.disabled_subproject(dirname)
default_options = mesonlib.stringlistify(kwargs.get('default_options', []))
default_options = coredata.create_options_dict(default_options)
if dirname == '':
raise InterpreterException('Subproject dir name must not be empty.')
if dirname[0] == '.':
raise InterpreterException('Subproject dir name must not start with a period.')
if '..' in dirname:
raise InterpreterException('Subproject name must not contain a ".." path segment.')
if os.path.isabs(dirname):
raise InterpreterException('Subproject name must not be an absolute path.')
if has_path_sep(dirname):
mlog.warning('Subproject name has a path separator. This may cause unexpected behaviour.')
if dirname in self.subproject_stack:
fullstack = self.subproject_stack + [dirname]
incpath = ' => '.join(fullstack)
raise InvalidCode('Recursive include of subprojects: %s.' % incpath)
if dirname in self.subprojects:
subproject = self.subprojects[dirname]
if required and not subproject.found():
raise InterpreterException('Subproject "%s/%s" required but not found.' % (
self.subproject_dir, dirname))
return subproject
subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir)
r = wrap.Resolver(subproject_dir_abs, self.coredata.get_builtin_option('wrap_mode'))
try:
resolved = r.resolve(dirname)
except wrap.WrapException as e:
subprojdir = os.path.join(self.subproject_dir, r.directory)
if not required:
mlog.log('\nSubproject ', mlog.bold(subprojdir), 'is buildable:', mlog.red('NO'), '(disabling)\n')
return self.disabled_subproject(dirname)
if isinstance(e, wrap.WrapNotFoundException):
# if the reason subproject execution failed was because
# the directory doesn't exist, try to give some helpful
# advice if it's a nested subproject that needs
# promotion...
self.print_nested_info(dirname)
msg = 'Failed to initialize {!r}:\n{}'
raise InterpreterException(msg.format(subprojdir, e))
subdir = os.path.join(self.subproject_dir, resolved)
os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
self.global_args_frozen = True
mlog.log()
with mlog.nested():
try:
mlog.log('\nExecuting subproject', mlog.bold(dirname), '\n')
new_build = self.build.copy()
subi = Interpreter(new_build, self.backend, dirname, subdir, self.subproject_dir,
self.modules, default_options)
subi.subprojects = self.subprojects
subi.subproject_stack = self.subproject_stack + [dirname]
current_active = self.active_projectname
subi.run()
mlog.log('\nSubproject', mlog.bold(dirname), 'finished.')
except Exception as e:
if not required:
mlog.log(e)
mlog.log('\nSubproject', mlog.bold(dirname), 'is buildable:', mlog.red('NO'), '(disabling)')
return self.disabled_subproject(dirname)
else:
raise e
if 'version' in kwargs:
pv = subi.project_version
wanted = kwargs['version']
if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]:
raise InterpreterException('Subproject %s version is %s but %s required.' % (dirname, pv, wanted))
self.active_projectname = current_active
self.build.subprojects[dirname] = subi.project_version
self.subprojects.update(subi.subprojects)
self.subprojects[dirname] = SubprojectHolder(subi, self.subproject_dir, dirname)
self.build_def_files += subi.build_def_files
self.build.merge(subi.build)
return self.subprojects[dirname]
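    # Look up an option by name, checking base, builtin and compiler options
    # first; a subproject option may yield to a same-named parent project option.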
def get_option_internal(self, optname):
# Some base options are not defined in some environments, return the
# default value from compilers.base_options in that case.
for d in [self.coredata.base_options, compilers.base_options,
self.coredata.builtins, self.coredata.compiler_options]:
try:
return d[optname]
except KeyError:
pass
raw_optname = optname
if self.is_subproject():
optname = self.subproject + ':' + optname
try:
opt = self.coredata.user_options[optname]
if opt.yielding and ':' in optname and raw_optname in self.coredata.user_options:
popt = self.coredata.user_options[raw_optname]
if type(opt) is type(popt):
opt = popt
else:
# Get class name, then option type as a string
opt_type = opt.__class__.__name__[4:][:-6].lower()
popt_type = popt.__class__.__name__[4:][:-6].lower()
# This is not a hard error to avoid dependency hell, the workaround
# when this happens is to simply set the subproject's option directly.
mlog.warning('Option {0!r} of type {1!r} in subproject {2!r} cannot yield '
'to parent option of type {3!r}, ignoring parent value. '
'Use -D{2}:{0}=value to set the value for this option manually'
'.'.format(raw_optname, opt_type, self.subproject, popt_type))
return opt
except KeyError:
pass
raise InterpreterException('Tried to access unknown option "%s".' % optname)
@stringArgs
@noKwargs
def func_get_option(self, nodes, args, kwargs):
if len(args) != 1:
raise InterpreterException('Argument required for get_option.')
optname = args[0]
if ':' in optname:
raise InterpreterException('Having a colon in option name is forbidden, '
'projects are not allowed to directly access '
'options of other subprojects.')
opt = self.get_option_internal(optname)
if isinstance(opt, coredata.UserFeatureOption):
return FeatureOptionHolder(self.environment, opt)
elif isinstance(opt, coredata.UserOption):
return opt.value
return opt
@noKwargs
def func_configuration_data(self, node, args, kwargs):
if len(args) > 1:
            raise InterpreterException('configuration_data takes only one optional positional argument')
elif len(args) == 1:
initial_values = args[0]
if not isinstance(initial_values, dict):
raise InterpreterException('configuration_data first argument must be a dictionary')
else:
initial_values = {}
cdata = ConfigurationDataHolder(self.subproject)
for k, v in initial_values.items():
cdata.set_method([k, v], {})
return cdata
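    # Apply default_options as if they had been given on the command line, then
    # pass the subset belonging to this (sub)project on to coredata.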
def set_options(self, default_options):
# Set default options as if they were passed to the command line.
# Subprojects can only define default for user options.
for k, v in default_options.items():
if self.subproject:
if optinterpreter.is_invalid_name(k):
continue
k = self.subproject + ':' + k
self.environment.cmd_line_options.setdefault(k, v)
# Create a subset of cmd_line_options, keeping only options for this
# subproject. Also take builtin options if it's the main project.
# Language and backend specific options will be set later when adding
# languages and setting the backend (builtin options must be set first
# to know which backend we'll use).
options = {}
for k, v in self.environment.cmd_line_options.items():
if self.subproject:
if not k.startswith(self.subproject + ':'):
continue
elif k not in coredata.get_builtin_options():
if ':' in k:
continue
if optinterpreter.is_invalid_name(k):
continue
options[k] = v
self.coredata.set_options(options, self.subproject)
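    # Choose the build backend (ninja, vs*, xcode); subprojects reuse the
    # backend that the main project already selected.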
def set_backend(self):
# The backend is already set when parsing subprojects
if self.backend is not None:
return
backend = self.coredata.get_builtin_option('backend')
if backend == 'ninja':
from .backend import ninjabackend
self.backend = ninjabackend.NinjaBackend(self.build)
elif backend == 'vs':
from .backend import vs2010backend
self.backend = vs2010backend.autodetect_vs_version(self.build)
self.coredata.set_builtin_option('backend', self.backend.name)
mlog.log('Auto detected Visual Studio backend:', mlog.bold(self.backend.name))
elif backend == 'vs2010':
from .backend import vs2010backend
self.backend = vs2010backend.Vs2010Backend(self.build)
elif backend == 'vs2015':
from .backend import vs2015backend
self.backend = vs2015backend.Vs2015Backend(self.build)
elif backend == 'vs2017':
from .backend import vs2017backend
self.backend = vs2017backend.Vs2017Backend(self.build)
elif backend == 'xcode':
from .backend import xcodebackend
self.backend = xcodebackend.XCodeBackend(self.build)
else:
raise InterpreterException('Unknown backend "%s".' % backend)
# Only init backend options on first invocation otherwise it would
# override values previously set from command line.
if self.environment.first_invocation:
self.coredata.init_backend_options(backend)
options = {k: v for k, v in self.environment.cmd_line_options.items() if k.startswith('backend_')}
self.coredata.set_options(options)
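    # project(): validate the declaration, process the option file, apply
    # default options, select the backend and detect the requested languages.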
@stringArgs
@permittedKwargs(permitted_kwargs['project'])
def func_project(self, node, args, kwargs):
if len(args) < 1:
raise InvalidArguments('Not enough arguments to project(). Needs at least the project name.')
proj_name = args[0]
proj_langs = args[1:]
if ':' in proj_name:
raise InvalidArguments("Project name {!r} must not contain ':'".format(proj_name))
if os.path.exists(self.option_file):
oi = optinterpreter.OptionInterpreter(self.subproject)
oi.process(self.option_file)
self.coredata.merge_user_options(oi.options)
# Do not set default_options on reconfigure otherwise it would override
# values previously set from command line. That means that changing
# default_options in a project will trigger a reconfigure but won't
# have any effect.
self.project_default_options = mesonlib.stringlistify(kwargs.get('default_options', []))
self.project_default_options = coredata.create_options_dict(self.project_default_options)
if self.environment.first_invocation:
default_options = self.project_default_options
default_options.update(self.default_project_options)
else:
default_options = {}
self.set_options(default_options)
self.set_backend()
if not self.is_subproject():
self.build.project_name = proj_name
self.active_projectname = proj_name
self.project_version = kwargs.get('version', 'undefined')
if self.build.project_version is None:
self.build.project_version = self.project_version
proj_license = mesonlib.stringlistify(kwargs.get('license', 'unknown'))
self.build.dep_manifest[proj_name] = {'version': self.project_version,
'license': proj_license}
if self.subproject in self.build.projects:
raise InvalidCode('Second call to project().')
if not self.is_subproject() and 'subproject_dir' in kwargs:
spdirname = kwargs['subproject_dir']
if not isinstance(spdirname, str):
raise InterpreterException('Subproject_dir must be a string')
if os.path.isabs(spdirname):
raise InterpreterException('Subproject_dir must not be an absolute path.')
if spdirname.startswith('.'):
raise InterpreterException('Subproject_dir must not begin with a period.')
if '..' in spdirname:
raise InterpreterException('Subproject_dir must not contain a ".." segment.')
self.subproject_dir = spdirname
self.build.subproject_dir = self.subproject_dir
mesonlib.project_meson_versions[self.subproject] = ''
if 'meson_version' in kwargs:
cv = coredata.version
pv = kwargs['meson_version']
mesonlib.project_meson_versions[self.subproject] = pv
if not mesonlib.version_compare(cv, pv):
raise InterpreterException('Meson version is %s but project requires %s.' % (cv, pv))
self.build.projects[self.subproject] = proj_name
mlog.log('Project name:', mlog.bold(proj_name))
mlog.log('Project version:', mlog.bold(self.project_version))
self.add_languages(proj_langs, True)
langs = self.coredata.compilers.keys()
if 'vala' in langs:
if 'c' not in langs:
raise InterpreterException('Compiling Vala requires C. Add C to your project languages and rerun Meson.')
if not self.is_subproject():
self.check_cross_stdlibs()
@permittedKwargs(permitted_kwargs['add_languages'])
@stringArgs
def func_add_languages(self, node, args, kwargs):
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
for lang in sorted(args, key=compilers.sort_clink):
mlog.log('Compiler for language', mlog.bold(lang), 'skipped: feature', mlog.bold(feature), 'disabled')
return False
return self.add_languages(args, required)
def get_message_string_arg(self, node):
# reduce arguments again to avoid flattening posargs
(posargs, _) = self.reduce_arguments(node.args)
if len(posargs) != 1:
raise InvalidArguments('Expected 1 argument, got %d' % len(posargs))
arg = posargs[0]
if isinstance(arg, list):
argstr = stringifyUserArguments(arg)
elif isinstance(arg, dict):
argstr = stringifyUserArguments(arg)
elif isinstance(arg, str):
argstr = arg
elif isinstance(arg, int):
argstr = str(arg)
else:
            raise InvalidArguments('Function accepts only strings, integers, dictionaries, lists and lists thereof.')
return argstr
@noKwargs
def func_message(self, node, args, kwargs):
argstr = self.get_message_string_arg(node)
mlog.log(mlog.bold('Message:'), argstr)
@FeatureNew('warning', '0.44.0')
@noKwargs
def func_warning(self, node, args, kwargs):
argstr = self.get_message_string_arg(node)
mlog.warning(argstr, location=node)
@noKwargs
def func_error(self, node, args, kwargs):
self.validate_arguments(args, 1, [str])
raise InterpreterException('Problem encountered: ' + args[0])
@noKwargs
def func_exception(self, node, args, kwargs):
self.validate_arguments(args, 0, [])
raise Exception()
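    # Detect the native compiler (and the cross compiler when cross building)
    # for one language, run its sanity check and register its options.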
def detect_compilers(self, lang, need_cross_compiler):
cross_comp = None
if lang == 'c':
comp = self.environment.detect_c_compiler(False)
if need_cross_compiler:
cross_comp = self.environment.detect_c_compiler(True)
elif lang == 'cpp':
comp = self.environment.detect_cpp_compiler(False)
if need_cross_compiler:
cross_comp = self.environment.detect_cpp_compiler(True)
elif lang == 'objc':
comp = self.environment.detect_objc_compiler(False)
if need_cross_compiler:
cross_comp = self.environment.detect_objc_compiler(True)
elif lang == 'objcpp':
comp = self.environment.detect_objcpp_compiler(False)
if need_cross_compiler:
cross_comp = self.environment.detect_objcpp_compiler(True)
elif lang == 'java':
comp = self.environment.detect_java_compiler()
if need_cross_compiler:
cross_comp = comp # Java is platform independent.
elif lang == 'cs':
comp = self.environment.detect_cs_compiler()
if need_cross_compiler:
cross_comp = comp # C# is platform independent.
elif lang == 'vala':
comp = self.environment.detect_vala_compiler()
if need_cross_compiler:
cross_comp = comp # Vala compiles to platform-independent C
elif lang == 'd':
comp = self.environment.detect_d_compiler(False)
if need_cross_compiler:
cross_comp = self.environment.detect_d_compiler(True)
elif lang == 'rust':
comp = self.environment.detect_rust_compiler(False)
if need_cross_compiler:
cross_comp = self.environment.detect_rust_compiler(True)
elif lang == 'fortran':
comp = self.environment.detect_fortran_compiler(False)
if need_cross_compiler:
cross_comp = self.environment.detect_fortran_compiler(True)
elif lang == 'swift':
comp = self.environment.detect_swift_compiler()
if need_cross_compiler:
raise InterpreterException('Cross compilation with Swift is not working yet.')
# cross_comp = self.environment.detect_fortran_compiler(True)
else:
raise InvalidCode('Tried to use unknown language "%s".' % lang)
comp.sanity_check(self.environment.get_scratch_dir(), self.environment)
self.coredata.compilers[lang] = comp
# Native compiler always exist so always add its options.
new_options = comp.get_options()
if cross_comp is not None:
cross_comp.sanity_check(self.environment.get_scratch_dir(), self.environment)
self.coredata.cross_compilers[lang] = cross_comp
new_options.update(cross_comp.get_options())
optprefix = lang + '_'
for k, o in new_options.items():
if not k.startswith(optprefix):
raise InterpreterException('Internal error, %s has incorrect prefix.' % k)
if k in self.environment.cmd_line_options:
o.set_value(self.environment.cmd_line_options[k])
self.coredata.compiler_options.setdefault(k, o)
# Unlike compiler and linker flags, preprocessor flags are not in
# compiler_options because they are not visible to user.
preproc_flags = comp.get_preproc_flags()
preproc_flags = shlex.split(preproc_flags)
self.coredata.external_preprocess_args.setdefault(lang, preproc_flags)
return comp, cross_comp
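    # Add compilers for the requested languages, logging the native and cross
    # toolchains; returns False when a compiler is missing and not required.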
def add_languages(self, args, required):
success = True
need_cross_compiler = self.environment.is_cross_build() and self.environment.cross_info.need_cross_compiler()
for lang in sorted(args, key=compilers.sort_clink):
lang = lang.lower()
if lang in self.coredata.compilers:
comp = self.coredata.compilers[lang]
cross_comp = self.coredata.cross_compilers.get(lang, None)
else:
try:
(comp, cross_comp) = self.detect_compilers(lang, need_cross_compiler)
except Exception:
if not required:
mlog.log('Compiler for language', mlog.bold(lang), 'not found.')
success = False
continue
else:
raise
if comp.full_version is not None:
version_string = '(%s %s "%s")' % (comp.id, comp.version, comp.full_version)
else:
version_string = '(%s %s)' % (comp.id, comp.version)
mlog.log('Native', comp.get_display_language(), 'compiler:',
mlog.bold(' '.join(comp.get_exelist())), version_string)
self.build.add_compiler(comp)
if need_cross_compiler:
version_string = '(%s %s)' % (cross_comp.id, cross_comp.version)
mlog.log('Cross', cross_comp.get_display_language(), 'compiler:',
mlog.bold(' '.join(cross_comp.get_exelist())), version_string)
self.build.add_cross_compiler(cross_comp)
if self.environment.is_cross_build() and not need_cross_compiler:
self.build.add_cross_compiler(comp)
self.add_base_options(comp)
return success
def emit_base_options_warnings(self, enabled_opts):
if 'b_bitcode' in enabled_opts:
            mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.')
mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.')
def add_base_options(self, compiler):
enabled_opts = []
for optname in compiler.base_options:
if optname in self.coredata.base_options:
continue
oobj = compilers.base_options[optname]
if optname in self.environment.cmd_line_options:
oobj.set_value(self.environment.cmd_line_options[optname])
enabled_opts.append(optname)
            self.coredata.base_options[optname] = oobj
self.emit_base_options_warnings(enabled_opts)
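    # Look up the given program names in the [binaries] section of a cross or
    # native configuration file; returns None so the caller can search further.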
def _program_from_file(self, prognames, bins, silent):
for p in prognames:
if hasattr(p, 'held_object'):
p = p.held_object
if isinstance(p, mesonlib.File):
continue # Always points to a local (i.e. self generated) file.
if not isinstance(p, str):
raise InterpreterException('Executable name must be a string')
prog = ExternalProgram.from_bin_list(bins, p)
if prog.found():
return ExternalProgramHolder(prog)
return None
def program_from_cross_file(self, prognames, silent=False):
bins = self.environment.cross_info.config['binaries']
return self._program_from_file(prognames, bins, silent)
def program_from_config_file(self, prognames, silent=False):
bins = self.environment.config_info.binaries
return self._program_from_file(prognames, bins, silent)
def program_from_system(self, args, silent=False):
# Search for scripts relative to current subdir.
# Do not cache found programs because find_program('foobar')
# might give different results when run from different source dirs.
source_dir = os.path.join(self.environment.get_source_dir(), self.subdir)
for exename in args:
if isinstance(exename, mesonlib.File):
if exename.is_built:
search_dir = os.path.join(self.environment.get_build_dir(),
exename.subdir)
else:
search_dir = os.path.join(self.environment.get_source_dir(),
exename.subdir)
exename = exename.fname
elif isinstance(exename, str):
search_dir = source_dir
else:
raise InvalidArguments('find_program only accepts strings and '
'files, not {!r}'.format(exename))
extprog = dependencies.ExternalProgram(exename, search_dir=search_dir,
silent=silent)
progobj = ExternalProgramHolder(extprog)
if progobj.found():
return progobj
def program_from_overrides(self, command_names, silent=False):
for name in command_names:
if not isinstance(name, str):
continue
if name in self.build.find_overrides:
exe = self.build.find_overrides[name]
if not silent:
mlog.log('Program', mlog.bold(name), 'found:', mlog.green('YES'),
'(overridden: %s)' % exe.description())
return ExternalProgramHolder(exe)
return None
def store_name_lookups(self, command_names):
for name in command_names:
if isinstance(name, str):
self.build.searched_programs.add(name)
def add_find_program_override(self, name, exe):
if name in self.build.searched_programs:
raise InterpreterException('Tried to override finding of executable "%s" which has already been found.'
% name)
if name in self.build.find_overrides:
raise InterpreterException('Tried to override executable "%s" which has already been overridden.'
% name)
self.build.find_overrides[name] = exe
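    # find_program() lookup order: user overrides, binaries declared in the
    # cross or native file, then the system PATH and the source tree.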
def find_program_impl(self, args, native=False, required=True, silent=True):
if not isinstance(args, list):
args = [args]
progobj = self.program_from_overrides(args, silent=silent)
if progobj is None:
if self.build.environment.is_cross_build() and not native:
progobj = self.program_from_cross_file(args, silent=silent)
else:
progobj = self.program_from_config_file(args, silent=silent)
if progobj is None:
progobj = self.program_from_system(args, silent=silent)
if required and (progobj is None or not progobj.found()):
raise InvalidArguments('Program(s) {!r} not found or not executable'.format(args))
if progobj is None:
return ExternalProgramHolder(dependencies.NonExistingExternalProgram())
# Only store successful lookups
self.store_name_lookups(args)
return progobj
@FeatureNewKwargs('find_program', '0.49.0', ['disabler'])
@disablerIfNotFound
@permittedKwargs(permitted_kwargs['find_program'])
def func_find_program(self, node, args, kwargs):
if not args:
raise InterpreterException('No program name specified.')
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
mlog.log('Program', mlog.bold(' '.join(args)), 'skipped: feature', mlog.bold(feature), 'disabled')
return ExternalProgramHolder(dependencies.NonExistingExternalProgram())
if not isinstance(required, bool):
raise InvalidArguments('"required" argument must be a boolean.')
use_native = kwargs.get('native', False)
if not isinstance(use_native, bool):
raise InvalidArguments('Argument to "native" must be a boolean.')
return self.find_program_impl(args, native=use_native, required=required, silent=False)
def func_find_library(self, node, args, kwargs):
raise InvalidCode('find_library() is removed, use meson.get_compiler(\'name\').find_library() instead.\n'
'Look here for documentation: http://mesonbuild.com/Reference-manual.html#compiler-object\n'
'Look here for example: http://mesonbuild.com/howtox.html#add-math-library-lm-portably\n'
)
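    # Check the dependency cache, also accepting an earlier hit for the same
    # dependency whose version satisfies the current requirements.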
def _find_cached_dep(self, name, kwargs):
# Check if we want this as a cross-dep or a native-dep
# FIXME: Not all dependencies support such a distinction right now,
# and we repeat this check inside dependencies that do. We need to
# consolidate this somehow.
is_cross = self.environment.is_cross_build()
if 'native' in kwargs and is_cross:
want_cross = not kwargs['native']
else:
want_cross = is_cross
identifier = dependencies.get_dep_identifier(name, kwargs, want_cross)
cached_dep = None
# Check if we've already searched for and found this dep
if identifier in self.coredata.deps:
cached_dep = self.coredata.deps[identifier]
mlog.log('Dependency', mlog.bold(name),
'found:', mlog.green('YES'), '(cached)')
else:
# Check if exactly the same dep with different version requirements
# was found already.
wanted = identifier[1]
for trial, trial_dep in self.coredata.deps.items():
# trial[1], identifier[1] are the version requirements
if trial[0] != identifier[0] or trial[2:] != identifier[2:]:
continue
found = trial_dep.get_version()
if not wanted or mesonlib.version_compare_many(found, wanted)[0]:
# We either don't care about the version, or our
# version requirements matched the trial dep's version.
cached_dep = trial_dep
break
return identifier, cached_dep
@staticmethod
def check_subproject_version(wanted, found):
if wanted == 'undefined':
return True
if found == 'undefined' or not mesonlib.version_compare_many(found, wanted)[0]:
return False
return True
def get_subproject_dep(self, name, dirname, varname, required):
dep = DependencyHolder(NotFoundDependency(self.environment), self.subproject)
try:
subproject = self.subprojects[dirname]
if subproject.found():
dep = self.subprojects[dirname].get_variable_method([varname], {})
        except InvalidArguments:
pass
if not isinstance(dep, DependencyHolder):
raise InvalidCode('Fetched variable {!r} in the subproject {!r} is '
'not a dependency object.'.format(varname, dirname))
if not dep.found():
if required:
raise DependencyException('Could not find dependency {} in subproject {}'
''.format(varname, dirname))
# If the dependency is not required, don't raise an exception
subproj_path = os.path.join(self.subproject_dir, dirname)
mlog.log('Dependency', mlog.bold(name), 'from subproject',
mlog.bold(subproj_path), 'found:', mlog.red('NO'))
return dep
def _find_cached_fallback_dep(self, name, dirname, varname, wanted, required):
if dirname not in self.subprojects:
return False
dep = self.get_subproject_dep(name, dirname, varname, required)
if not dep.found():
return dep
found = dep.version_method([], {})
# Don't do a version check if the dependency is not found and not required
if not dep.found_method([], {}) and not required:
subproj_path = os.path.join(self.subproject_dir, dirname)
mlog.log('Dependency', mlog.bold(name), 'from subproject',
mlog.bold(subproj_path), 'found:', mlog.red('NO'), '(cached)')
return dep
if self.check_subproject_version(wanted, found):
subproj_path = os.path.join(self.subproject_dir, dirname)
mlog.log('Dependency', mlog.bold(name), 'from subproject',
mlog.bold(subproj_path), 'found:', mlog.green('YES'), '(cached)')
return dep
if required:
raise DependencyException('Version {} of subproject dependency {} already '
'cached, requested incompatible version {} for '
'dep {}'.format(found, dirname, wanted, name))
return None
def _handle_featurenew_dependencies(self, name):
'Do a feature check on dependencies used by this subproject'
if name == 'mpi':
FeatureNew('MPI Dependency', '0.42.0').use(self.subproject)
elif name == 'pcap':
FeatureNew('Pcap Dependency', '0.42.0').use(self.subproject)
elif name == 'vulkan':
FeatureNew('Vulkan Dependency', '0.42.0').use(self.subproject)
elif name == 'libwmf':
FeatureNew('LibWMF Dependency', '0.44.0').use(self.subproject)
elif name == 'openmp':
FeatureNew('OpenMP Dependency', '0.46.0').use(self.subproject)
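    # dependency(): try the cache, then external dependency discovery, then the
    # 'fallback' subproject, e.g. dependency('zlib', fallback: ['zlib', 'zlib_dep']).
    # Only externally found dependencies are cached.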
@FeatureNewKwargs('dependency', '0.49.0', ['disabler'])
@FeatureNewKwargs('dependency', '0.40.0', ['method'])
@FeatureNewKwargs('dependency', '0.38.0', ['default_options'])
@disablerIfNotFound
@permittedKwargs(permitted_kwargs['dependency'])
def func_dependency(self, node, args, kwargs):
self.validate_arguments(args, 1, [str])
name = args[0]
display_name = name if name else '(anonymous)'
disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
if disabled:
mlog.log('Dependency', mlog.bold(display_name), 'skipped: feature', mlog.bold(feature), 'disabled')
return DependencyHolder(NotFoundDependency(self.environment), self.subproject)
# writing just "dependency('')" is an error, because it can only fail
if name == '' and required and 'fallback' not in kwargs:
raise InvalidArguments('Dependency is both required and not-found')
if '<' in name or '>' in name or '=' in name:
            raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify\n'
                                   'version requirements use the \'version\' keyword argument instead.')
identifier, cached_dep = self._find_cached_dep(name, kwargs)
if cached_dep:
if required and not cached_dep.found():
m = 'Dependency {!r} was already checked and was not found'
raise DependencyException(m.format(display_name))
dep = cached_dep
else:
# If the dependency has already been configured, possibly by
# a higher level project, try to use it first.
if 'fallback' in kwargs:
dirname, varname = self.get_subproject_infos(kwargs)
wanted = kwargs.get('version', 'undefined')
dep = self._find_cached_fallback_dep(name, dirname, varname, wanted, required)
if dep:
return dep
# We need to actually search for this dep
exception = None
dep = NotFoundDependency(self.environment)
# Unless a fallback exists and is forced ...
if self.coredata.get_builtin_option('wrap_mode') == WrapMode.forcefallback and 'fallback' in kwargs:
pass
# ... search for it outside the project
elif name != '':
self._handle_featurenew_dependencies(name)
try:
dep = dependencies.find_external_dependency(name, self.environment, kwargs)
except DependencyException as e:
exception = e
# Search inside the projects list
if not dep.found():
if 'fallback' in kwargs:
if not exception:
exception = DependencyException("fallback for %s not found" % display_name)
fallback_dep = self.dependency_fallback(name, kwargs)
if fallback_dep:
# Never add fallback deps to self.coredata.deps since we
# cannot cache them. They must always be evaluated else
# we won't actually read all the build files.
return fallback_dep
if required:
                    assert exception is not None
raise exception
# Only store found-deps in the cache
if dep.found():
self.coredata.deps[identifier] = dep
return DependencyHolder(dep, self.subproject)
@FeatureNew('disabler', '0.44.0')
@noKwargs
@noPosargs
def func_disabler(self, node, args, kwargs):
return Disabler()
def print_nested_info(self, dependency_name):
message = ['Dependency', mlog.bold(dependency_name), 'not found but it is available in a sub-subproject.\n' +
'To use it in the current project, promote it by going in the project source\n'
'root and issuing']
sprojs = mesonlib.detect_subprojects('subprojects', self.source_root)
if dependency_name not in sprojs:
return
found = sprojs[dependency_name]
if len(found) > 1:
message.append('one of the following commands:')
else:
message.append('the following command:')
command_templ = '\nmeson wrap promote {}'
for l in found:
message.append(mlog.bold(command_templ.format(l[len(self.source_root) + 1:])))
mlog.warning(*message)
def get_subproject_infos(self, kwargs):
fbinfo = kwargs['fallback']
check_stringlist(fbinfo)
if len(fbinfo) != 2:
raise InterpreterException('Fallback info must have exactly two items.')
return fbinfo
def dependency_fallback(self, name, kwargs):
display_name = name if name else '(anonymous)'
if self.coredata.get_builtin_option('wrap_mode') in (WrapMode.nofallback, WrapMode.nodownload):
mlog.log('Not looking for a fallback subproject for the dependency',
                     mlog.bold(display_name), 'because:\nUse of fallback '
                     'dependencies is disabled.')
return None
elif self.coredata.get_builtin_option('wrap_mode') == WrapMode.forcefallback:
mlog.log('Looking for a fallback subproject for the dependency',
mlog.bold(display_name), 'because:\nUse of fallback dependencies is forced.')
else:
mlog.log('Looking for a fallback subproject for the dependency',
mlog.bold(display_name))
dirname, varname = self.get_subproject_infos(kwargs)
# Try to execute the subproject
try:
sp_kwargs = {}
try:
sp_kwargs['default_options'] = kwargs['default_options']
except KeyError:
pass
self.do_subproject(dirname, sp_kwargs)
# Invalid code is always an error
except InvalidCode:
raise
# If the subproject execution failed in a non-fatal way, don't raise an
# exception; let the caller handle things.
except Exception as e:
msg = ['Couldn\'t use fallback subproject in',
mlog.bold(os.path.join(self.subproject_dir, dirname)),
'for the dependency', mlog.bold(display_name), '\nReason:']
if isinstance(e, mesonlib.MesonException):
msg.append(e.get_msg_with_context())
else:
msg.append(traceback.format_exc())
mlog.log(*msg)
return None
required = kwargs.get('required', True)
dep = self.get_subproject_dep(name, dirname, varname, required)
if not dep.found():
return dep
subproj_path = os.path.join(self.subproject_dir, dirname)
# Check if the version of the declared dependency matches what we want
if 'version' in kwargs:
wanted = kwargs['version']
found = dep.version_method([], {})
# Don't do a version check if the dependency is not found and not required
if not dep.found_method([], {}) and not required:
subproj_path = os.path.join(self.subproject_dir, dirname)
mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
mlog.bold(subproj_path), 'found:', mlog.red('NO'))
return dep
if not self.check_subproject_version(wanted, found):
mlog.log('Subproject', mlog.bold(subproj_path), 'dependency',
mlog.bold(display_name), 'version is', mlog.bold(found),
'but', mlog.bold(wanted), 'is required.')
return None
mlog.log('Dependency', mlog.bold(display_name), 'from subproject',
mlog.bold(subproj_path), 'found:', mlog.green('YES'))
return dep
@FeatureNewKwargs('executable', '0.42.0', ['implib'])
@permittedKwargs(permitted_kwargs['executable'])
def func_executable(self, node, args, kwargs):
return self.build_target(node, args, kwargs, ExecutableHolder)
@permittedKwargs(permitted_kwargs['static_library'])
def func_static_lib(self, node, args, kwargs):
return self.build_target(node, args, kwargs, StaticLibraryHolder)
@permittedKwargs(permitted_kwargs['shared_library'])
def func_shared_lib(self, node, args, kwargs):
holder = self.build_target(node, args, kwargs, SharedLibraryHolder)
holder.held_object.shared_library_only = True
return holder
@permittedKwargs(permitted_kwargs['both_libraries'])
def func_both_lib(self, node, args, kwargs):
return self.build_both_libraries(node, args, kwargs)
@FeatureNew('shared_module', '0.37.0')
@permittedKwargs(permitted_kwargs['shared_module'])
def func_shared_module(self, node, args, kwargs):
return self.build_target(node, args, kwargs, SharedModuleHolder)
@permittedKwargs(permitted_kwargs['library'])
def func_library(self, node, args, kwargs):
return self.build_library(node, args, kwargs)
@permittedKwargs(permitted_kwargs['jar'])
def func_jar(self, node, args, kwargs):
return self.build_target(node, args, kwargs, JarHolder)
@FeatureNewKwargs('build_target', '0.40.0', ['link_whole', 'override_options'])
@permittedKwargs(permitted_kwargs['build_target'])
def func_build_target(self, node, args, kwargs):
if 'target_type' not in kwargs:
raise InterpreterException('Missing target_type keyword argument')
target_type = kwargs.pop('target_type')
if target_type == 'executable':
return self.build_target(node, args, kwargs, ExecutableHolder)
elif target_type == 'shared_library':
return self.build_target(node, args, kwargs, SharedLibraryHolder)
elif target_type == 'static_library':
return self.build_target(node, args, kwargs, StaticLibraryHolder)
elif target_type == 'both_libraries':
return self.build_both_libraries(node, args, kwargs)
elif target_type == 'library':
return self.build_library(node, args, kwargs)
elif target_type == 'jar':
return self.build_target(node, args, kwargs, JarHolder)
else:
raise InterpreterException('Unknown target_type.')
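    # vcs_tag(): create a custom target that rewrites the input file at build
    # time, replacing @VCS_TAG@ (or a custom placeholder) with the detected VCS
    # revision, or with the fallback string when no VCS is found.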
@permittedKwargs(permitted_kwargs['vcs_tag'])
def func_vcs_tag(self, node, args, kwargs):
if 'input' not in kwargs or 'output' not in kwargs:
raise InterpreterException('Keyword arguments input and output must exist')
if 'fallback' not in kwargs:
FeatureNew('Optional fallback in vcs_tag', '0.41.0').use(self.subproject)
fallback = kwargs.pop('fallback', self.project_version)
if not isinstance(fallback, str):
raise InterpreterException('Keyword argument fallback must be a string.')
replace_string = kwargs.pop('replace_string', '@VCS_TAG@')
regex_selector = '(.*)' # default regex selector for custom command: use complete output
vcs_cmd = kwargs.get('command', None)
if vcs_cmd and not isinstance(vcs_cmd, list):
vcs_cmd = [vcs_cmd]
source_dir = os.path.normpath(os.path.join(self.environment.get_source_dir(), self.subdir))
if vcs_cmd:
# Is the command an executable in path or maybe a script in the source tree?
vcs_cmd[0] = shutil.which(vcs_cmd[0]) or os.path.join(source_dir, vcs_cmd[0])
else:
vcs = mesonlib.detect_vcs(source_dir)
if vcs:
mlog.log('Found %s repository at %s' % (vcs['name'], vcs['wc_dir']))
vcs_cmd = vcs['get_rev'].split()
regex_selector = vcs['rev_regex']
else:
vcs_cmd = [' '] # executing this cmd will fail in vcstagger.py and force to use the fallback string
# vcstagger.py parameters: infile, outfile, fallback, source_dir, replace_string, regex_selector, command...
kwargs['command'] = self.environment.get_build_command() + \
['--internal',
'vcstagger',
'@INPUT0@',
'@OUTPUT0@',
fallback,
source_dir,
replace_string,
regex_selector] + vcs_cmd
kwargs.setdefault('build_by_default', True)
kwargs.setdefault('build_always_stale', True)
return self._func_custom_target_impl(node, [kwargs['output']], kwargs)
@FeatureNew('subdir_done', '0.46.0')
@stringArgs
def func_subdir_done(self, node, args, kwargs):
if len(kwargs) > 0:
            raise InterpreterException('subdir_done does not take named arguments')
if len(args) > 0:
            raise InterpreterException('subdir_done does not take any arguments')
raise SubdirDoneRequest()
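    # custom_target(): a single positional name; inputs, outputs and the command
    # come from keyword arguments. The implementation is shared with vcs_tag().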
@stringArgs
@FeatureNewKwargs('custom_target', '0.48.0', ['console'])
@FeatureNewKwargs('custom_target', '0.47.0', ['install_mode', 'build_always_stale'])
@FeatureNewKwargs('custom_target', '0.40.0', ['build_by_default'])
@permittedKwargs(permitted_kwargs['custom_target'])
def func_custom_target(self, node, args, kwargs):
if len(args) != 1:
raise InterpreterException('custom_target: Only one positional argument is allowed, and it must be a string name')
if 'depfile' in kwargs and ('@BASENAME@' in kwargs['depfile'] or '@PLAINNAME@' in kwargs['depfile']):
FeatureNew('substitutions in custom_target depfile', '0.47.0').use(self.subproject)
return self._func_custom_target_impl(node, args, kwargs)
def _func_custom_target_impl(self, node, args, kwargs):
'Implementation-only, without FeatureNew checks, for internal use'
name = args[0]
kwargs['install_mode'] = self._get_kwarg_install_mode(kwargs)
if 'input' in kwargs:
try:
kwargs['input'] = self.source_strings_to_files(extract_as_list(kwargs, 'input'))
except mesonlib.MesonException:
mlog.warning('''Custom target input \'%s\' can\'t be converted to File object(s).
This will become a hard error in the future.''' % kwargs['input'])
tg = CustomTargetHolder(build.CustomTarget(name, self.subdir, self.subproject, kwargs), self)
self.add_target(name, tg.held_object)
return tg
@permittedKwargs(permitted_kwargs['run_target'])
def func_run_target(self, node, args, kwargs):
if len(args) > 1:
raise InvalidCode('Run_target takes only one positional argument: the target name.')
elif len(args) == 1:
if 'command' not in kwargs:
raise InterpreterException('Missing "command" keyword argument')
all_args = extract_as_list(kwargs, 'command')
deps = extract_as_list(kwargs, 'depends', unholder=True)
else:
raise InterpreterException('Run_target needs at least one positional argument.')
cleaned_args = []
for i in listify(all_args, unholder=True):
if not isinstance(i, (str, build.BuildTarget, build.CustomTarget, dependencies.ExternalProgram, mesonlib.File)):
mlog.debug('Wrong type:', str(i))
raise InterpreterException('Invalid argument to run_target.')
if isinstance(i, dependencies.ExternalProgram) and not i.found():
raise InterpreterException('Tried to use non-existing executable {!r}'.format(i.name))
cleaned_args.append(i)
name = args[0]
if not isinstance(name, str):
raise InterpreterException('First argument must be a string.')
cleaned_deps = []
for d in deps:
if not isinstance(d, (build.BuildTarget, build.CustomTarget)):
raise InterpreterException('Depends items must be build targets.')
cleaned_deps.append(d)
command = cleaned_args[0]
cmd_args = cleaned_args[1:]
tg = RunTargetHolder(name, command, cmd_args, cleaned_deps, self.subdir, self.subproject)
self.add_target(name, tg.held_object)
return tg
@permittedKwargs(permitted_kwargs['generator'])
def func_generator(self, node, args, kwargs):
gen = GeneratorHolder(self, args, kwargs)
self.generators.append(gen)
return gen
@permittedKwargs(permitted_kwargs['benchmark'])
def func_benchmark(self, node, args, kwargs):
self.add_test(node, args, kwargs, False)
@FeatureNewKwargs('test', '0.46.0', ['depends'])
@permittedKwargs(permitted_kwargs['test'])
def func_test(self, node, args, kwargs):
self.add_test(node, args, kwargs, True)
def unpack_env_kwarg(self, kwargs):
envlist = kwargs.get('env', EnvironmentVariablesHolder())
if isinstance(envlist, EnvironmentVariablesHolder):
env = envlist.held_object
else:
envlist = listify(envlist)
# Convert from array to environment object
env = EnvironmentVariablesHolder()
for e in envlist:
if '=' not in e:
raise InterpreterException('Env var definition must be of type key=val.')
(k, val) = e.split('=', 1)
k = k.strip()
val = val.strip()
if ' ' in k:
raise InterpreterException('Env var key must not have spaces in it.')
env.set_method([k, val], {})
env = env.held_object
return env
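    # Shared implementation of test() and benchmark(): validate the executable,
    # arguments, environment, suite names and dependencies, then register a Test.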
def add_test(self, node, args, kwargs, is_base_test):
if len(args) != 2:
raise InterpreterException('Incorrect number of arguments')
if not isinstance(args[0], str):
raise InterpreterException('First argument of test must be a string.')
exe = args[1]
if not isinstance(exe, (ExecutableHolder, JarHolder, ExternalProgramHolder)):
if isinstance(exe, mesonlib.File):
exe = self.func_find_program(node, args[1], {})
else:
raise InterpreterException('Second argument must be executable.')
par = kwargs.get('is_parallel', True)
if not isinstance(par, bool):
raise InterpreterException('Keyword argument is_parallel must be a boolean.')
cmd_args = extract_as_list(kwargs, 'args', unholder=True)
for i in cmd_args:
if not isinstance(i, (str, mesonlib.File, build.Target)):
raise InterpreterException('Command line arguments must be strings, files or targets.')
env = self.unpack_env_kwarg(kwargs)
should_fail = kwargs.get('should_fail', False)
if not isinstance(should_fail, bool):
raise InterpreterException('Keyword argument should_fail must be a boolean.')
timeout = kwargs.get('timeout', 30)
if 'workdir' in kwargs:
workdir = kwargs['workdir']
if not isinstance(workdir, str):
raise InterpreterException('Workdir keyword argument must be a string.')
if not os.path.isabs(workdir):
raise InterpreterException('Workdir keyword argument must be an absolute path.')
else:
workdir = None
if not isinstance(timeout, int):
raise InterpreterException('Timeout must be an integer.')
suite = []
prj = self.subproject if self.is_subproject() else self.build.project_name
for s in mesonlib.stringlistify(kwargs.get('suite', '')):
if len(s) > 0:
s = ':' + s
suite.append(prj.replace(' ', '_').replace(':', '_') + s)
depends = extract_as_list(kwargs, 'depends', unholder=True)
for dep in depends:
if not isinstance(dep, (build.CustomTarget, build.BuildTarget)):
raise InterpreterException('Depends items must be build targets.')
t = Test(args[0], prj, suite, exe.held_object, depends, par, cmd_args,
env, should_fail, timeout, workdir)
if is_base_test:
self.build.tests.append(t)
mlog.debug('Adding test', mlog.bold(args[0], True))
else:
self.build.benchmarks.append(t)
mlog.debug('Adding benchmark', mlog.bold(args[0], True))
@FeatureNewKwargs('install_headers', '0.47.0', ['install_mode'])
@permittedKwargs(permitted_kwargs['install_headers'])
def func_install_headers(self, node, args, kwargs):
source_files = self.source_strings_to_files(args)
kwargs['install_mode'] = self._get_kwarg_install_mode(kwargs)
h = Headers(source_files, kwargs)
self.build.headers.append(h)
return h
@FeatureNewKwargs('install_man', '0.47.0', ['install_mode'])
@permittedKwargs(permitted_kwargs['install_man'])
def func_install_man(self, node, args, kwargs):
fargs = self.source_strings_to_files(args)
kwargs['install_mode'] = self._get_kwarg_install_mode(kwargs)
m = Man(fargs, kwargs)
self.build.man.append(m)
return m
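    # subdir(): descend into a subdirectory, parse its meson.build and evaluate
    # it with the current interpreter state; revisiting a directory is an error.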
@FeatureNewKwargs('subdir', '0.44.0', ['if_found'])
@permittedKwargs(permitted_kwargs['subdir'])
def func_subdir(self, node, args, kwargs):
self.validate_arguments(args, 1, [str])
mesonlib.check_direntry_issues(args)
if '..' in args[0]:
raise InvalidArguments('Subdir contains ..')
if self.subdir == '' and args[0] == self.subproject_dir:
raise InvalidArguments('Must not go into subprojects dir with subdir(), use subproject() instead.')
if self.subdir == '' and args[0].startswith('meson-'):
raise InvalidArguments('The "meson-" prefix is reserved and cannot be used for top-level subdir().')
for i in mesonlib.extract_as_list(kwargs, 'if_found'):
if not hasattr(i, 'found_method'):
raise InterpreterException('Object used in if_found does not have a found method.')
if not i.found_method([], {}):
return
prev_subdir = self.subdir
subdir = os.path.join(prev_subdir, args[0])
if os.path.isabs(subdir):
raise InvalidArguments('Subdir argument must be a relative path.')
absdir = os.path.join(self.environment.get_source_dir(), subdir)
symlinkless_dir = os.path.realpath(absdir)
if symlinkless_dir in self.visited_subdirs:
raise InvalidArguments('Tried to enter directory "%s", which has already been visited.'
% subdir)
self.visited_subdirs[symlinkless_dir] = True
self.subdir = subdir
os.makedirs(os.path.join(self.environment.build_dir, subdir), exist_ok=True)
buildfilename = os.path.join(self.subdir, environment.build_filename)
self.build_def_files.append(buildfilename)
absname = os.path.join(self.environment.get_source_dir(), buildfilename)
if not os.path.isfile(absname):
self.subdir = prev_subdir
raise InterpreterException('Non-existent build file {!r}'.format(buildfilename))
with open(absname, encoding='utf8') as f:
code = f.read()
        assert isinstance(code, str)
try:
codeblock = mparser.Parser(code, self.subdir).parse()
except mesonlib.MesonException as me:
me.file = buildfilename
raise me
try:
self.evaluate_codeblock(codeblock)
except SubdirDoneRequest:
pass
self.subdir = prev_subdir
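    # Convert the install_mode kwarg (a permissions string plus optional user
    # and group, with false meaning "use the default") into a FileMode object.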
def _get_kwarg_install_mode(self, kwargs):
if kwargs.get('install_mode', None) is None:
return None
install_mode = []
mode = mesonlib.typeslistify(kwargs.get('install_mode', []), (str, int))
for m in mode:
# We skip any arguments that are set to `false`
if m is False:
m = None
install_mode.append(m)
if len(install_mode) > 3:
raise InvalidArguments('Keyword argument install_mode takes at '
'most 3 arguments.')
if len(install_mode) > 0 and install_mode[0] is not None and \
not isinstance(install_mode[0], str):
raise InvalidArguments('Keyword argument install_mode requires the '
'permissions arg to be a string or false')
return FileMode(*install_mode)
@FeatureNewKwargs('install_data', '0.46.0', ['rename'])
@FeatureNewKwargs('install_data', '0.38.0', ['install_mode'])
@permittedKwargs(permitted_kwargs['install_data'])
def func_install_data(self, node, args, kwargs):
kwsource = mesonlib.stringlistify(kwargs.get('sources', []))
raw_sources = args + kwsource
sources = []
source_strings = []
for s in raw_sources:
if isinstance(s, mesonlib.File):
sources.append(s)
else:
source_strings.append(s)
sources += self.source_strings_to_files(source_strings)
install_dir = kwargs.get('install_dir', None)
if not isinstance(install_dir, (str, type(None))):
raise InvalidArguments('Keyword argument install_dir not a string.')
install_mode = self._get_kwarg_install_mode(kwargs)
rename = kwargs.get('rename', None)
data = DataHolder(build.Data(sources, install_dir, install_mode, rename))
self.build.data.append(data.held_object)
return data
@FeatureNewKwargs('install_subdir', '0.42.0', ['exclude_files', 'exclude_directories'])
@FeatureNewKwargs('install_subdir', '0.38.0', ['install_mode'])
@permittedKwargs(permitted_kwargs['install_subdir'])
@stringArgs
def func_install_subdir(self, node, args, kwargs):
if len(args) != 1:
raise InvalidArguments('Install_subdir requires exactly one argument.')
subdir = args[0]
if 'install_dir' not in kwargs:
raise InvalidArguments('Missing keyword argument install_dir')
install_dir = kwargs['install_dir']
if not isinstance(install_dir, str):
raise InvalidArguments('Keyword argument install_dir not a string.')
if 'strip_directory' in kwargs:
if not isinstance(kwargs['strip_directory'], bool):
raise InterpreterException('"strip_directory" keyword must be a boolean.')
strip_directory = kwargs['strip_directory']
else:
strip_directory = False
if 'exclude_files' in kwargs:
exclude = extract_as_list(kwargs, 'exclude_files')
for f in exclude:
if not isinstance(f, str):
raise InvalidArguments('Exclude argument not a string.')
elif os.path.isabs(f):
raise InvalidArguments('Exclude argument cannot be absolute.')
exclude_files = set(exclude)
else:
exclude_files = set()
if 'exclude_directories' in kwargs:
exclude = extract_as_list(kwargs, 'exclude_directories')
for d in exclude:
if not isinstance(d, str):
raise InvalidArguments('Exclude argument not a string.')
elif os.path.isabs(d):
raise InvalidArguments('Exclude argument cannot be absolute.')
exclude_directories = set(exclude)
else:
exclude_directories = set()
exclude = (exclude_files, exclude_directories)
install_mode = self._get_kwarg_install_mode(kwargs)
idir = InstallDir(self.subdir, subdir, install_dir, install_mode, exclude, strip_directory)
self.build.install_dirs.append(idir)
return idir
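    # configure_file(): exactly one of the 'configuration', 'command' or 'copy'
    # keyword arguments selects how the output is produced, e.g.
    # configure_file(input: 'config.h.in', output: 'config.h', configuration: conf).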
@FeatureNewKwargs('configure_file', '0.47.0', ['copy', 'output_format', 'install_mode', 'encoding'])
@FeatureNewKwargs('configure_file', '0.46.0', ['format'])
@FeatureNewKwargs('configure_file', '0.41.0', ['capture'])
@permittedKwargs(permitted_kwargs['configure_file'])
def func_configure_file(self, node, args, kwargs):
if len(args) > 0:
raise InterpreterException("configure_file takes only keyword arguments.")
if 'output' not in kwargs:
raise InterpreterException('Required keyword argument "output" not defined.')
actions = set(['configuration', 'command', 'copy']).intersection(kwargs.keys())
if len(actions) == 0:
raise InterpreterException('Must specify an action with one of these '
'keyword arguments: \'configuration\', '
'\'command\', or \'copy\'.')
elif len(actions) == 2:
raise InterpreterException('Must not specify both {!r} and {!r} '
'keyword arguments since they are '
'mutually exclusive.'.format(*actions))
elif len(actions) == 3:
            raise InterpreterException('Must specify only one of {!r}, {!r}, and '
'{!r} keyword arguments since they are '
'mutually exclusive.'.format(*actions))
if 'capture' in kwargs:
if not isinstance(kwargs['capture'], bool):
raise InterpreterException('"capture" keyword must be a boolean.')
if 'command' not in kwargs:
raise InterpreterException('"capture" keyword requires "command" keyword.')
if 'format' in kwargs:
fmt = kwargs['format']
if not isinstance(fmt, str):
raise InterpreterException('"format" keyword must be a string.')
else:
fmt = 'meson'
if fmt not in ('meson', 'cmake', 'cmake@'):
raise InterpreterException('"format" possible values are "meson", "cmake" or "cmake@".')
if 'output_format' in kwargs:
output_format = kwargs['output_format']
if not isinstance(output_format, str):
raise InterpreterException('"output_format" keyword must be a string.')
else:
output_format = 'c'
if output_format not in ('c', 'nasm'):
raise InterpreterException('"format" possible values are "c" or "nasm".')
# Validate input
inputfile = None
ifile_abs = None
if 'input' in kwargs:
inputfile = kwargs['input']
if isinstance(inputfile, list):
if len(inputfile) != 1:
m = "Keyword argument 'input' requires exactly one file"
raise InterpreterException(m)
inputfile = inputfile[0]
if not isinstance(inputfile, (str, mesonlib.File)):
raise InterpreterException('Input must be a string or a file')
if isinstance(inputfile, str):
inputfile = mesonlib.File.from_source_file(self.environment.source_dir,
self.subdir, inputfile)
ifile_abs = inputfile.absolute_path(self.environment.source_dir,
self.environment.build_dir)
elif 'command' in kwargs and '@INPUT@' in kwargs['command']:
raise InterpreterException('@INPUT@ used as command argument, but no input file specified.')
# Validate output
output = kwargs['output']
if not isinstance(output, str):
raise InterpreterException('Output file name must be a string')
if ifile_abs:
values = mesonlib.get_filenames_templates_dict([ifile_abs], None)
outputs = mesonlib.substitute_values([output], values)
output = outputs[0]
ofile_rpath = os.path.join(self.subdir, output)
if ofile_rpath in self.configure_file_outputs:
mesonbuildfile = os.path.join(self.subdir, 'meson.build')
current_call = "{}:{}".format(mesonbuildfile, self.current_lineno)
first_call = "{}:{}".format(mesonbuildfile, self.configure_file_outputs[ofile_rpath])
mlog.warning('Output file', mlog.bold(ofile_rpath, True), 'for configure_file() at', current_call, 'overwrites configure_file() output at', first_call)
else:
self.configure_file_outputs[ofile_rpath] = self.current_lineno
if os.path.dirname(output) != '':
raise InterpreterException('Output file name must not contain a subdirectory.')
(ofile_path, ofile_fname) = os.path.split(os.path.join(self.subdir, output))
ofile_abs = os.path.join(self.environment.build_dir, ofile_path, ofile_fname)
# Perform the appropriate action
if 'configuration' in kwargs:
conf = kwargs['configuration']
if isinstance(conf, dict):
cdata = ConfigurationDataHolder(self.subproject)
for k, v in conf.items():
cdata.set_method([k, v], {})
conf = cdata
elif not isinstance(conf, ConfigurationDataHolder):
raise InterpreterException('Argument "configuration" is not of type configuration_data')
mlog.log('Configuring', mlog.bold(output), 'using configuration')
if inputfile is not None:
os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
file_encoding = kwargs.setdefault('encoding', 'utf-8')
missing_variables, confdata_useless = \
mesonlib.do_conf_file(ifile_abs, ofile_abs, conf.held_object,
fmt, file_encoding)
if missing_variables:
var_list = ", ".join(map(repr, sorted(missing_variables)))
mlog.warning(
"The variable(s) %s in the input file '%s' are not "
"present in the given configuration data." % (
var_list, inputfile), location=node)
if confdata_useless:
ifbase = os.path.basename(ifile_abs)
mlog.warning('Got an empty configuration_data() object and found no '
'substitutions in the input file {!r}. If you want to '
'copy a file to the build dir, use the \'copy:\' keyword '
'argument added in 0.47.0'.format(ifbase), location=node)
else:
mesonlib.dump_conf_header(ofile_abs, conf.held_object, output_format)
conf.mark_used()
elif 'command' in kwargs:
# We use absolute paths for input and output here because the cwd
# that the command is run from is 'unspecified', so it could change.
# Currently it's builddir/subdir for in_builddir else srcdir/subdir.
if ifile_abs:
values = mesonlib.get_filenames_templates_dict([ifile_abs], [ofile_abs])
else:
values = mesonlib.get_filenames_templates_dict(None, [ofile_abs])
# Substitute @INPUT@, @OUTPUT@, etc here.
cmd = mesonlib.substitute_values(kwargs['command'], values)
mlog.log('Configuring', mlog.bold(output), 'with command')
res = self.run_command_impl(node, cmd, {}, True)
if res.returncode != 0:
raise InterpreterException('Running configure command failed.\n%s\n%s' %
(res.stdout, res.stderr))
if 'capture' in kwargs and kwargs['capture']:
dst_tmp = ofile_abs + '~'
file_encoding = kwargs.setdefault('encoding', 'utf-8')
with open(dst_tmp, 'w', encoding=file_encoding) as f:
f.writelines(res.stdout)
if ifile_abs:
shutil.copymode(ifile_abs, dst_tmp)
mesonlib.replace_if_different(ofile_abs, dst_tmp)
elif 'copy' in kwargs:
os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
shutil.copyfile(ifile_abs, ofile_abs)
shutil.copymode(ifile_abs, ofile_abs)
else:
# Not reachable
raise AssertionError
# If the input is a source file, add it to the list of files that we
# need to reconfigure on when they change. FIXME: Do the same for
# files() objects in the command: kwarg.
if inputfile and not inputfile.is_built:
# Normalize the path of the conffile (relative to the
# source root) to avoid duplicates. This is especially
# important to convert '/' to '\' on Windows
conffile = os.path.normpath(inputfile.relative_name())
if conffile not in self.build_def_files:
self.build_def_files.append(conffile)
# Install file if requested, we check for the empty string
# for backwards compatibility. That was the behaviour before
# 0.45.0 so preserve it.
idir = kwargs.get('install_dir', None)
if isinstance(idir, str) and idir:
cfile = mesonlib.File.from_built_file(ofile_path, ofile_fname)
install_mode = self._get_kwarg_install_mode(kwargs)
self.build.data.append(build.Data([cfile], idir, install_mode))
return mesonlib.File.from_built_file(self.subdir, output)
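    # Illustrative meson.build usage of the handler above (a sketch only; the exact
    # keywords accepted are validated at the top of this method):
    #   conf = configuration_data()
    #   conf.set('VERSION', '1.0')
    #   configure_file(input: 'config.h.in', output: 'config.h', configuration: conf)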
@permittedKwargs(permitted_kwargs['include_directories'])
@stringArgs
def func_include_directories(self, node, args, kwargs):
return self.build_incdir_object(args, kwargs.get('is_system', False))
def build_incdir_object(self, incdir_strings, is_system=False):
if not isinstance(is_system, bool):
raise InvalidArguments('Is_system must be boolean.')
src_root = self.environment.get_source_dir()
build_root = self.environment.get_build_dir()
absbase_src = os.path.join(src_root, self.subdir)
absbase_build = os.path.join(build_root, self.subdir)
for a in incdir_strings:
if a.startswith(src_root):
raise InvalidArguments('''Tried to form an absolute path to a source dir. You should not do that but use
relative paths instead.
To get include path to any directory relative to the current dir do
incdir = include_directories(dirname)
After this incdir will contain both the current source dir as well as the
corresponding build dir. It can then be used in any subdirectory and
Meson will take care of all the busywork to make paths work.
Dirname can even be '.' to mark the current directory. Though you should
remember that the current source and build directories are always
put in the include directories by default so you only need to do
include_directories('.') if you intend to use the result in a
different subdirectory.
''')
absdir_src = os.path.join(absbase_src, a)
absdir_build = os.path.join(absbase_build, a)
if not os.path.isdir(absdir_src) and not os.path.isdir(absdir_build):
raise InvalidArguments('Include dir %s does not exist.' % a)
i = IncludeDirsHolder(build.IncludeDirs(self.subdir, incdir_strings, is_system))
return i
@permittedKwargs(permitted_kwargs['add_test_setup'])
@stringArgs
def func_add_test_setup(self, node, args, kwargs):
if len(args) != 1:
raise InterpreterException('Add_test_setup needs one argument for the setup name.')
setup_name = args[0]
if re.fullmatch('([_a-zA-Z][_0-9a-zA-Z]*:)?[_a-zA-Z][_0-9a-zA-Z]*', setup_name) is None:
            raise InterpreterException('Setup name may only contain alphanumeric characters or underscores, optionally prefixed by a project name and a colon.')
if ":" not in setup_name:
setup_name = (self.subproject if self.subproject else self.build.project_name) + ":" + setup_name
try:
inp = extract_as_list(kwargs, 'exe_wrapper', unholder=True)
exe_wrapper = []
for i in inp:
if isinstance(i, str):
exe_wrapper.append(i)
elif isinstance(i, dependencies.ExternalProgram):
if not i.found():
raise InterpreterException('Tried to use non-found executable.')
exe_wrapper += i.get_command()
else:
raise InterpreterException('Exe wrapper can only contain strings or external binaries.')
except KeyError:
exe_wrapper = None
gdb = kwargs.get('gdb', False)
if not isinstance(gdb, bool):
raise InterpreterException('Gdb option must be a boolean')
timeout_multiplier = kwargs.get('timeout_multiplier', 1)
if not isinstance(timeout_multiplier, int):
raise InterpreterException('Timeout multiplier must be a number.')
is_default = kwargs.get('is_default', False)
if not isinstance(is_default, bool):
raise InterpreterException('is_default option must be a boolean')
if is_default:
if self.build.test_setup_default_name is not None:
raise InterpreterException('\'%s\' is already set as default. '
'is_default can be set to true only once' % self.build.test_setup_default_name)
self.build.test_setup_default_name = setup_name
env = self.unpack_env_kwarg(kwargs)
self.build.test_setups[setup_name] = build.TestSetup(exe_wrapper=exe_wrapper,
gdb=gdb,
timeout_multiplier=timeout_multiplier,
env=env)
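    # Illustrative meson.build usage of add_test_setup() (a sketch only):
    #   add_test_setup('valgrind', exe_wrapper: ['valgrind'], timeout_multiplier: 3)
    # registers a setup named '<project>:valgrind', per the name-prefixing logic above.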
def get_argdict_on_crossness(self, native_dict, cross_dict, kwargs):
for_native = kwargs.get('native', not self.environment.is_cross_build())
if not isinstance(for_native, bool):
raise InterpreterException('Keyword native must be a boolean.')
if for_native:
return native_dict
else:
return cross_dict
@permittedKwargs(permitted_kwargs['add_global_arguments'])
@stringArgs
def func_add_global_arguments(self, node, args, kwargs):
argdict = self.get_argdict_on_crossness(self.build.global_args,
self.build.cross_global_args,
kwargs)
self.add_global_arguments(node, argdict, args, kwargs)
@permittedKwargs(permitted_kwargs['add_global_link_arguments'])
@stringArgs
def func_add_global_link_arguments(self, node, args, kwargs):
argdict = self.get_argdict_on_crossness(self.build.global_link_args,
self.build.cross_global_link_args,
kwargs)
self.add_global_arguments(node, argdict, args, kwargs)
@permittedKwargs(permitted_kwargs['add_project_arguments'])
@stringArgs
def func_add_project_arguments(self, node, args, kwargs):
argdict = self.get_argdict_on_crossness(self.build.projects_args,
self.build.cross_projects_args,
kwargs)
self.add_project_arguments(node, argdict, args, kwargs)
@permittedKwargs(permitted_kwargs['add_project_link_arguments'])
@stringArgs
def func_add_project_link_arguments(self, node, args, kwargs):
argdict = self.get_argdict_on_crossness(self.build.projects_link_args,
self.build.cross_projects_link_args, kwargs)
self.add_project_arguments(node, argdict, args, kwargs)
def add_global_arguments(self, node, argsdict, args, kwargs):
if self.is_subproject():
msg = 'Function \'{}\' cannot be used in subprojects because ' \
'there is no way to make that reliable.\nPlease only call ' \
'this if is_subproject() returns false. Alternatively, ' \
'define a variable that\ncontains your language-specific ' \
'arguments and add it to the appropriate *_args kwarg ' \
'in each target.'.format(node.func_name)
raise InvalidCode(msg)
frozen = self.project_args_frozen or self.global_args_frozen
self.add_arguments(node, argsdict, frozen, args, kwargs)
def add_project_arguments(self, node, argsdict, args, kwargs):
if self.subproject not in argsdict:
argsdict[self.subproject] = {}
self.add_arguments(node, argsdict[self.subproject],
self.project_args_frozen, args, kwargs)
def add_arguments(self, node, argsdict, args_frozen, args, kwargs):
if args_frozen:
msg = 'Tried to use \'{}\' after a build target has been declared.\n' \
'This is not permitted. Please declare all ' \
'arguments before your targets.'.format(node.func_name)
raise InvalidCode(msg)
if 'language' not in kwargs:
raise InvalidCode('Missing language definition in {}'.format(node.func_name))
for lang in mesonlib.stringlistify(kwargs['language']):
lang = lang.lower()
argsdict[lang] = argsdict.get(lang, []) + args
@noKwargs
@noPosargs
def func_environment(self, node, args, kwargs):
return EnvironmentVariablesHolder()
@stringArgs
@noKwargs
def func_join_paths(self, node, args, kwargs):
return self.join_path_strings(args)
def run(self):
super().run()
mlog.log('Build targets in project:', mlog.bold(str(len(self.build.targets))))
FeatureNew.report(self.subproject)
FeatureDeprecated.report(self.subproject)
if not self.is_subproject():
self.print_extra_warnings()
def print_extra_warnings(self):
for c in self.build.compilers.values():
if c.get_id() == 'clang':
self.check_clang_asan_lundef()
break
def check_clang_asan_lundef(self):
if 'b_lundef' not in self.coredata.base_options:
return
if 'b_sanitize' not in self.coredata.base_options:
return
if (self.coredata.base_options['b_lundef'].value and
self.coredata.base_options['b_sanitize'].value != 'none'):
mlog.warning('''Trying to use {} sanitizer on Clang with b_lundef.
This will probably not work.
Try setting b_lundef to false instead.'''.format(self.coredata.base_options['b_sanitize'].value))
def evaluate_subproject_info(self, path_from_source_root, subproject_dirname):
depth = 0
subproj_name = ''
segs = PurePath(path_from_source_root).parts
segs_spd = PurePath(subproject_dirname).parts
while segs and segs[0] == segs_spd[0]:
if len(segs_spd) == 1:
subproj_name = segs[1]
segs = segs[2:]
depth += 1
else:
segs_spd = segs_spd[1:]
segs = segs[1:]
return (depth, subproj_name)
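    # Worked example for evaluate_subproject_info (illustrative): with a subproject
    # dir of 'subprojects', the path 'subprojects/foo/src/a.c' yields (1, 'foo'),
    # while a path outside it such as 'src/a.c' yields (0, '').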
# Check that the indicated file is within the same subproject
# as we currently are. This is to stop people doing
# nasty things like:
#
# f = files('../../master_src/file.c')
#
# Note that this is validated only when the file
# object is generated. The result can be used in a different
# subproject than it is defined in (due to e.g. a
# declare_dependency).
def validate_within_subproject(self, subdir, fname):
norm = os.path.normpath(os.path.join(subdir, fname))
if os.path.isabs(norm):
if not norm.startswith(self.environment.source_dir):
# Grabbing files outside the source tree is ok.
# This is for vendor stuff like:
#
# /opt/vendorsdk/src/file_with_license_restrictions.c
return
norm = os.path.relpath(norm, self.environment.source_dir)
assert(not os.path.isabs(norm))
(num_sps, sproj_name) = self.evaluate_subproject_info(norm, self.subproject_dir)
plain_filename = os.path.basename(norm)
if num_sps == 0:
if not self.is_subproject():
return
raise InterpreterException('Sandbox violation: Tried to grab file %s from a different subproject.' % plain_filename)
if num_sps > 1:
raise InterpreterException('Sandbox violation: Tried to grab file %s from a nested subproject.' % plain_filename)
if sproj_name != self.subproject_directory_name:
raise InterpreterException('Sandbox violation: Tried to grab file %s from a different subproject.' % plain_filename)
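    # Illustrative example: a call such as files('../../other_sub/file.c') made from
    # inside a subproject resolves outside that subproject and triggers one of the
    # 'Sandbox violation' errors above.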
def source_strings_to_files(self, sources):
results = []
mesonlib.check_direntry_issues(sources)
if not isinstance(sources, list):
sources = [sources]
for s in sources:
if isinstance(s, (mesonlib.File, GeneratedListHolder,
TargetHolder, CustomTargetIndexHolder)):
pass
elif isinstance(s, str):
self.validate_within_subproject(self.subdir, s)
s = mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, s)
else:
raise InterpreterException('Source item is {!r} instead of '
'string or File-type object'.format(s))
results.append(s)
return results
def add_target(self, name, tobj):
if name == '':
raise InterpreterException('Target name must not be empty.')
if name.strip() == '':
raise InterpreterException('Target name must not consist only of whitespace.')
if name.startswith('meson-'):
raise InvalidArguments("Target names starting with 'meson-' are reserved "
"for Meson's internal use. Please rename.")
if name in coredata.forbidden_target_names:
raise InvalidArguments("Target name '%s' is reserved for Meson's "
"internal use. Please rename." % name)
# To permit an executable and a shared library to have the
# same name, such as "foo.exe" and "libfoo.a".
idname = tobj.get_id()
if idname in self.build.targets:
raise InvalidCode('Tried to create target "%s", but a target of that name already exists.' % name)
self.build.targets[idname] = tobj
if idname not in self.coredata.target_guids:
self.coredata.target_guids[idname] = str(uuid.uuid4()).upper()
@FeatureNew('both_libraries', '0.46.0')
def build_both_libraries(self, node, args, kwargs):
shared_holder = self.build_target(node, args, kwargs, SharedLibraryHolder)
# Check if user forces non-PIC static library.
pic = True
if 'pic' in kwargs:
pic = kwargs['pic']
elif 'b_staticpic' in self.environment.coredata.base_options:
pic = self.environment.coredata.base_options['b_staticpic'].value
if pic:
# Exclude sources from args and kwargs to avoid building them twice
static_args = [args[0]]
static_kwargs = kwargs.copy()
static_kwargs['sources'] = []
static_kwargs['objects'] = shared_holder.held_object.extract_all_objects()
else:
static_args = args
static_kwargs = kwargs
static_holder = self.build_target(node, static_args, static_kwargs, StaticLibraryHolder)
return BothLibrariesHolder(shared_holder, static_holder, self)
def build_library(self, node, args, kwargs):
default_library = self.coredata.get_builtin_option('default_library')
if default_library == 'shared':
return self.build_target(node, args, kwargs, SharedLibraryHolder)
elif default_library == 'static':
return self.build_target(node, args, kwargs, StaticLibraryHolder)
elif default_library == 'both':
return self.build_both_libraries(node, args, kwargs)
else:
            raise InterpreterException('Unknown default_library value: %s.' % default_library)
def build_target(self, node, args, kwargs, targetholder):
@FeatureNewKwargs('build target', '0.42.0', ['rust_crate_type', 'build_rpath', 'implicit_include_directories'])
@FeatureNewKwargs('build target', '0.41.0', ['rust_args'])
@FeatureNewKwargs('build target', '0.40.0', ['build_by_default'])
@FeatureNewKwargs('build target', '0.48.0', ['gnu_symbol_visibility'])
def build_target_decorator_caller(self, node, args, kwargs):
return True
build_target_decorator_caller(self, node, args, kwargs)
if not args:
raise InterpreterException('Target does not have a name.')
name = args[0]
sources = listify(args[1:])
if self.environment.is_cross_build():
if kwargs.get('native', False):
is_cross = False
else:
is_cross = True
else:
is_cross = False
if 'sources' in kwargs:
sources += listify(kwargs['sources'])
sources = self.source_strings_to_files(sources)
objs = extract_as_list(kwargs, 'objects')
kwargs['dependencies'] = extract_as_list(kwargs, 'dependencies')
kwargs['install_mode'] = self._get_kwarg_install_mode(kwargs)
if 'extra_files' in kwargs:
ef = extract_as_list(kwargs, 'extra_files')
kwargs['extra_files'] = self.source_strings_to_files(ef)
self.check_sources_exist(os.path.join(self.source_root, self.subdir), sources)
if targetholder is ExecutableHolder:
targetclass = build.Executable
elif targetholder is SharedLibraryHolder:
targetclass = build.SharedLibrary
elif targetholder is SharedModuleHolder:
targetclass = build.SharedModule
elif targetholder is StaticLibraryHolder:
targetclass = build.StaticLibrary
elif targetholder is JarHolder:
targetclass = build.Jar
else:
mlog.debug('Unknown target type:', str(targetholder))
raise RuntimeError('Unreachable code')
self.kwarg_strings_to_includedirs(kwargs)
# Filter out kwargs from other target types. For example 'soversion'
# passed to library() when default_library == 'static'.
kwargs = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs}
target = targetclass(name, self.subdir, self.subproject, is_cross, sources, objs, self.environment, kwargs)
if is_cross:
self.add_cross_stdlib_info(target)
l = targetholder(target, self)
self.add_target(name, l.held_object)
self.project_args_frozen = True
return l
def kwarg_strings_to_includedirs(self, kwargs):
if 'd_import_dirs' in kwargs:
items = mesonlib.extract_as_list(kwargs, 'd_import_dirs')
cleaned_items = []
for i in items:
if isinstance(i, str):
# BW compatibility. This was permitted so we must support it
# for a few releases so people can transition to "correct"
# path declarations.
if os.path.normpath(i).startswith(self.environment.get_source_dir()):
mlog.warning('''Building a path to the source dir is not supported. Use a relative path instead.
This will become a hard error in the future.''')
i = os.path.relpath(i, os.path.join(self.environment.get_source_dir(), self.subdir))
i = self.build_incdir_object([i])
cleaned_items.append(i)
kwargs['d_import_dirs'] = cleaned_items
def get_used_languages(self, target):
result = {}
for i in target.sources:
for lang, c in self.build.compilers.items():
if c.can_compile(i):
result[lang] = True
break
return result
def add_cross_stdlib_info(self, target):
for l in self.get_used_languages(target):
if self.environment.cross_info.has_stdlib(l) \
and self.subproject != self.environment.cross_info.get_stdlib(l)[0]:
target.add_deps(self.build.cross_stdlibs[l])
def check_sources_exist(self, subdir, sources):
for s in sources:
if not isinstance(s, str):
continue # This means a generated source and they always exist.
fname = os.path.join(subdir, s)
if not os.path.isfile(fname):
raise InterpreterException('Tried to add non-existing source file %s.' % s)
def format_string(self, templ, args):
if isinstance(args, mparser.ArgumentNode):
args = args.arguments
arg_strings = []
for arg in args:
arg = self.evaluate_statement(arg)
if isinstance(arg, bool): # Python boolean is upper case.
arg = str(arg).lower()
arg_strings.append(str(arg))
def arg_replace(match):
idx = int(match.group(1))
if idx >= len(arg_strings):
raise InterpreterException('Format placeholder @{}@ out of range.'.format(idx))
return arg_strings[idx]
return re.sub(r'@(\d+)@', arg_replace, templ)
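    # Illustrative example for format_string: with a template of '@0@ and @1@' and
    # arguments that evaluate to ['foo', True], the result is 'foo and true'
    # (booleans are lowercased above before substitution).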
# Only permit object extraction from the same subproject
def validate_extraction(self, buildtarget):
if not self.subdir.startswith(self.subproject_dir):
if buildtarget.subdir.startswith(self.subproject_dir):
raise InterpreterException('Tried to extract objects from a subproject target.')
else:
if not buildtarget.subdir.startswith(self.subproject_dir):
raise InterpreterException('Tried to extract objects from the main project from a subproject.')
if self.subdir.split('/')[1] != buildtarget.subdir.split('/')[1]:
raise InterpreterException('Tried to extract objects from a different subproject.')
def check_contains(self, obj, args):
if len(args) != 1:
raise InterpreterException('Contains method takes exactly one argument.')
item = args[0]
for element in obj:
if isinstance(element, list):
found = self.check_contains(element, args)
if found:
return True
if element == item:
return True
return False
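    # Illustrative example: check_contains([1, [2, 3]], [3]) returns True because
    # nested lists are searched recursively.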
def is_subproject(self):
return self.subproject != ''
@noKwargs
@noArgsFlattening
def func_set_variable(self, node, args, kwargs):
if len(args) != 2:
raise InvalidCode('Set_variable takes two arguments.')
varname = args[0]
value = args[1]
self.set_variable(varname, value)
@noKwargs
@noArgsFlattening
def func_get_variable(self, node, args, kwargs):
if len(args) < 1 or len(args) > 2:
raise InvalidCode('Get_variable takes one or two arguments.')
varname = args[0]
if not isinstance(varname, str):
raise InterpreterException('First argument must be a string.')
try:
return self.variables[varname]
except KeyError:
pass
if len(args) == 2:
return args[1]
raise InterpreterException('Tried to get unknown variable "%s".' % varname)
@stringArgs
@noKwargs
def func_is_variable(self, node, args, kwargs):
if len(args) != 1:
            raise InvalidCode('Is_variable takes one argument.')
varname = args[0]
return varname in self.variables
| 45.851013
| 228
| 0.613273
|
a2669652042f37d08074ba7c4dae208df0a1a2a3
| 5,428
|
py
|
Python
|
src/image.py
|
WenbinFei/revamp
|
6c6bbba624caf55baf85def4cbfc989344aa3aec
|
[
"MIT"
] | null | null | null |
src/image.py
|
WenbinFei/revamp
|
6c6bbba624caf55baf85def4cbfc989344aa3aec
|
[
"MIT"
] | null | null | null |
src/image.py
|
WenbinFei/revamp
|
6c6bbba624caf55baf85def4cbfc989344aa3aec
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (c) 2020, Wenbin Fei, Email: wenbinfei@gmail.com
# All rights reserved.
"""
Have fun with images
====================
"""
import logging.config
import time
import traceback
import os
import numpy as np
import cv2
from PIL import Image  # needed by generate_long_image() below
# Create logger
import logger_ini
logger = logger_ini.logger_run()
def crop_journal_image(path):
"""
    Before using this function:
    prepare your journal figures in PowerPoint so that each figure spans either the whole width of the page
    (a two-column figure in the journal paper) or only half the width of the page
    (a single-column figure in the journal paper). Cover other parts with a white box if necessary.
    Crop the image exported from PPT across one or two columns of
    the journal page and export a cropped image with '_cropped' appended to the original name.
    :type path: string
    :param path: the path of the image
"""
start_time = time.time()
try:
store_row = []
img = cv2.imread(path)
for i in range(img.shape[0]):
img_row = img[i]
if(np.any(img_row<255)):
store_row_satrt = i
break
for i in range(img.shape[0]-1, 0, -1):
img_row = img[i]
if(np.any(img_row<255)):
store_row_end = i + 1
break
store_row = list(range(store_row_satrt, store_row_end))
        # check whether the image occupies only one column in the journal
        # by checking whether the left or right half of the page width is entirely white
check_points = store_row
check_1_col = img[store_row, :]
column_num = img.shape[1] # only use one row to check to save time
img_row_spl = np.hsplit(check_1_col, [column_num//2]) # only check one part
img_row_left = np.all(img_row_spl[0] == 255) # whether the left part is empty
img_row_right = np.all(img_row_spl[1] == 255) # whether the right part is empty
if(img_row_right):
store_col = range(column_num//2 + 1) # only keep the index of the left part
crop_img = img[store_row][:, store_col]
elif(img_row_left):
            store_col = range(column_num//2, column_num) # only keep the index of the right part
crop_img = img[store_row][:, store_col]
else:
crop_img = img[store_row][:] # keep the whole line
except:
logger.error('[crop_journal_image failed]')
logger.error(traceback.format_exc())
raise
else:
stop_time = time.time()
dt = stop_time - start_time
logger.info(f"[crop_journal_image completed] {path} in {round(dt,4)} s")
# cv2.imshow('cropped', crop_img)
# cv2.waitKey(0)
        # the cropped file will be exported with '_cropped' appended to the original name
path_split = path.rsplit('.', 1)
output_path = path_split[0] + '_cropped.' + path_split[-1]
cv2.imwrite(output_path, crop_img)
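# Hedged usage example (the path is illustrative only):
#   crop_journal_image("figures/Slide1.png")
# would write "figures/Slide1_cropped.png" next to the input image.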
def resize_image(path, percentage):
"""
Resize an image according to the percentage using python opencv.
A new image will be generated with _resized.XX
    :type path: string
    :param path: the path of the image
    :type percentage: int
    :param percentage: the percentage of the original image size
"""
start_time = time.time()
try:
img = cv2.imread(path, cv2.IMREAD_UNCHANGED)
width = int(img.shape[1] * percentage / 100)
height = int(img.shape[0] * percentage / 100)
dim = (width, height)
resized_img = cv2.resize(img, dim, interpolation = cv2.INTER_AREA)
except:
logger.error('[resize_image failed]')
logger.error(traceback.format_exc())
raise
else:
stop_time = time.time()
dt = stop_time - start_time
logger.info(f"[resize_image completed] {path} in {dt} s")
# cv2.imshow('cropped', crop_img)
# cv2.waitKey(0)
        # the resized file will be exported with '_resized' appended to the original name
path_split = path.rsplit('.', 1)
output_path = path_split[0] + '_resized.' + path_split[-1]
cv2.imwrite(output_path, resized_img)
def generate_long_image(output_path):
"""
    Generate a long image by combining individual images
    :type output_path: string
    :param output_path: a file path; its extension-stripped form is the folder of images, and the combined image is saved in that folder's parent directory
"""
try:
picture_path = output_path[:output_path.rfind('.')]
last_dir = os.path.dirname(picture_path) # parent directory
        # load the individual images
ims = [Image.open(os.path.join(picture_path, fn)) for fn in os.listdir(picture_path)]
width, height = ims[0].size
long_canvas = Image.new(ims[1].mode, (width, height * len(ims))) #create n photos with the same width
except:
logger.error(traceback.format_exc())
else:
# merge figures
for i, image in enumerate(ims):
long_canvas.paste(image, box=(0, i * height))
long_canvas.save(os.path.join(last_dir, 'long-image.tif')) # save as long figure
logger.info(f"[generate_long_image completed]")
if __name__ == "__main__":
# crop_journal_image(r"../tests/individual/Slide2.Tif")
resize_image(r"../tests/individual/Slide2.Tif", 60)
| 36.186667
| 109
| 0.614038
|
81c11b915bb67f5c73388ce35882d0e1dbdf2392
| 3,311
|
py
|
Python
|
.history/galleria/settings_20201114121308.py
|
Nyash-Mauro/galleria
|
350519827cf86a7af3c334dfedb69c1452e4122a
|
[
"MIT"
] | null | null | null |
.history/galleria/settings_20201114121308.py
|
Nyash-Mauro/galleria
|
350519827cf86a7af3c334dfedb69c1452e4122a
|
[
"MIT"
] | null | null | null |
.history/galleria/settings_20201114121308.py
|
Nyash-Mauro/galleria
|
350519827cf86a7af3c334dfedb69c1452e4122a
|
[
"MIT"
] | null | null | null |
"""
Django settings for galleria project.
Generated by 'django-admin startproject' using Django 3.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import cloudinary
import cloudinary.uploader
import cloudinary.api
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '9ush=sk*cyj20+g@-&l5bqy=m)f87l1_c5yl_6f&=#nkke+8g*'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'galleria.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'galleria.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'tribune',
'USER': 'moringa',
'PASSWORD':'moringa',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Africa/Nairobi'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
cloudinary.config(
cloud_name="dapwcit3i",
api_key="917726294659896",
api_secret="PeLRKhRoA2E-r-5ykRTpuEBNcH4"
)
| 25.274809
| 91
| 0.699185
|
203145c7c1c52cc202731491f2c6b358e8f009e6
| 1,432
|
py
|
Python
|
ketiga/cileunyi/shp/main.py
|
alitkurniawan48/BelajarGIS
|
c52556bc6fa74b849b9c3461410805807b742967
|
[
"MIT"
] | 2
|
2020-02-09T14:47:07.000Z
|
2020-02-09T14:47:12.000Z
|
ketiga/cileunyi/shp/main.py
|
alitkurniawan48/BelajarGIS
|
c52556bc6fa74b849b9c3461410805807b742967
|
[
"MIT"
] | 12
|
2019-12-11T06:45:59.000Z
|
2020-01-06T09:35:35.000Z
|
ketiga/cileunyi/shp/main.py
|
alitkurniawan48/BelajarGIS
|
c52556bc6fa74b849b9c3461410805807b742967
|
[
"MIT"
] | 71
|
2019-12-09T13:52:54.000Z
|
2021-05-28T16:19:09.000Z
|
import cileunyi
cileunyi = cileunyi.Cileunyi()
#Harun Ar - Rasyid - 1174027
cileunyi.desaCibiruHilir('Desa Cibiru Hilir')
#Harun Ar - Rasyid - 1174027
cileunyi.desaCileunyi('Desa Cileunyi')
#Kadek Diva Krishna Murti - 1174006
cileunyi.desaCileunyiWetan('Desa Cileunyi Wetan')
#Nico Ekklesia Sembiring - 1174096
cileunyi.desaCikemar('Desa Cikemar')
#Kadek Diva Krishna Murti - 1174006
cileunyi.desaCinunuk('Desa Cinunuk')
#Kadek Diva Krishna Murti - 1174006
cileunyi.kantorDesaCikemar('Kantor Desa Cikemar')
#Kadek Diva Krishna Murti - 1174006
cileunyi.kantorDesaCileunyiKulon('kantor Desa Cileunyi')
#Kadek Diva Krishna Murti - 1174006
cileunyi.kantorDesaCibiruWetan('Kantor Desa Cibiru Wetan')
#Dwi Yulianingsih - 1174009
cileunyi.kantorDesaCinunuk('Kantor Desa Cinunuk')
#Dwi Septiani Tsaniyah - 1174003
cileunyi.kantorKepalaDesaCileunyi('Kantor kepala Desa Cileunyi')
#Habib Abdul R - 1174002
cileunyi.RuteJalanCileunyi('Jalan Cileunyi')
#Felix Lase - 1174026
cileunyi.RuteJalanSukarameRancaBangoNo75('Rute Jalan Sukarame')
cileunyi.RuteJalanCikonengSatu('Rute Jalan Cikoneng')
#Evietania Charis Sujadi - 1174051
cileunyi.RuteJalanCimincrang('Rute Jalan Cimincrang')
cileunyi.RuteJalanRayaTagog('Rute Jalan Raya Tagog')
#Muhammad Dzihan Al-Banna - 1174095
cileunyi.RuteJalanSadangNo66Cinunuk('Rute jalan Cinunuk')
#Arjun Yuda Firwanda 1174008
cileunyi.RuteJalanBayangkaraCibiruHilir('Rute Jalan cibiru Hilir')
cileunyi.close()
| 37.684211
| 66
| 0.817039
|
8bf716be8df4ec8f523708dfe3a3e897fa7f9dd0
| 641
|
py
|
Python
|
test/vanilla/Expected/AcceptanceTests/RequiredOptional/requiredoptional/operations/__init__.py
|
qwordy/autorest.python
|
6b12df51c2a39a1285546b5a771b69f5896e794f
|
[
"MIT"
] | 35
|
2018-04-03T12:15:53.000Z
|
2022-03-11T14:03:34.000Z
|
test/vanilla/Expected/AcceptanceTests/RequiredOptional/requiredoptional/operations/__init__.py
|
qwordy/autorest.python
|
6b12df51c2a39a1285546b5a771b69f5896e794f
|
[
"MIT"
] | 652
|
2017-08-28T22:44:41.000Z
|
2022-03-31T21:20:31.000Z
|
test/vanilla/Expected/AcceptanceTests/RequiredOptional/requiredoptional/operations/__init__.py
|
qwordy/autorest.python
|
6b12df51c2a39a1285546b5a771b69f5896e794f
|
[
"MIT"
] | 29
|
2017-08-28T20:57:01.000Z
|
2022-03-11T14:03:38.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._implicit_operations import ImplicitOperations
from ._explicit_operations import ExplicitOperations
__all__ = [
"ImplicitOperations",
"ExplicitOperations",
]
| 40.0625
| 94
| 0.600624
|
4ae934af1f521469010b855662591cb7da5a9784
| 160
|
py
|
Python
|
tests/model_control/detailed/transf_Logit/model_control_one_enabled_Logit_LinearTrend_Seasonal_DayOfMonth_SVR.py
|
shaido987/pyaf
|
b9afd089557bed6b90b246d3712c481ae26a1957
|
[
"BSD-3-Clause"
] | 377
|
2016-10-13T20:52:44.000Z
|
2022-03-29T18:04:14.000Z
|
tests/model_control/detailed/transf_Logit/model_control_one_enabled_Logit_LinearTrend_Seasonal_DayOfMonth_SVR.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 160
|
2016-10-13T16:11:53.000Z
|
2022-03-28T04:21:34.000Z
|
tests/model_control/detailed/transf_Logit/model_control_one_enabled_Logit_LinearTrend_Seasonal_DayOfMonth_SVR.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 63
|
2017-03-09T14:51:18.000Z
|
2022-03-27T20:52:57.000Z
|
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Logit'] , ['LinearTrend'] , ['Seasonal_DayOfMonth'] , ['SVR'] );
| 40
| 87
| 0.75625
|
9da14cace2c0a66351ccb4699ee8e62bc4f3a711
| 325
|
py
|
Python
|
deeppavlov/_meta.py
|
deepmipt/pilot
|
67384e1b026ebb8ba6794a05613dd036e86635ec
|
[
"Apache-2.0"
] | null | null | null |
deeppavlov/_meta.py
|
deepmipt/pilot
|
67384e1b026ebb8ba6794a05613dd036e86635ec
|
[
"Apache-2.0"
] | null | null | null |
deeppavlov/_meta.py
|
deepmipt/pilot
|
67384e1b026ebb8ba6794a05613dd036e86635ec
|
[
"Apache-2.0"
] | null | null | null |
__version__ = '0.17.3'
__author__ = 'Neural Networks and Deep Learning lab, MIPT'
__description__ = 'An open source library for building end-to-end dialog systems and training chatbots.'
__keywords__ = ['NLP', 'NER', 'SQUAD', 'Intents', 'Chatbot']
__license__ = 'Apache License, Version 2.0'
__email__ = 'info@deeppavlov.ai'
| 46.428571
| 104
| 0.741538
|
789131f6cb66fd1536a52290d841d16d51e6fb35
| 1,280
|
py
|
Python
|
lightautoml/tasks/utils.py
|
PhySci/LightAutoML
|
e860991ba36361629fe9e11e034c5e976fe4219b
|
[
"Apache-2.0"
] | 1
|
2021-10-02T05:28:16.000Z
|
2021-10-02T05:28:16.000Z
|
lightautoml/tasks/utils.py
|
DESimakov/LightAutoML
|
2eaa05b27c63c613965d50cdb7d52da5d245d9af
|
[
"Apache-2.0"
] | null | null | null |
lightautoml/tasks/utils.py
|
DESimakov/LightAutoML
|
2eaa05b27c63c613965d50cdb7d52da5d245d9af
|
[
"Apache-2.0"
] | null | null | null |
"""."""
from typing import Callable
import numpy as np
def infer_gib(metric: Callable) -> bool:
"""Infer greater is better from metric.
Args:
metric: Score or loss function.
Returns:
        ```True``` if greater is better.
Raises:
AssertionError: If there is no way to order the predictions.
"""
label = np.array([0, 1])
pred = np.array([0.1, 0.9])
g_val = metric(label, pred)
b_val = metric(label, pred[::-1])
assert g_val != b_val, (
"Cannot infer greater is better from metric." " Should be set manually."
)
return g_val > b_val
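# Hedged usage illustration (the metric names below are examples, not part of this module):
#   from sklearn.metrics import roc_auc_score, mean_absolute_error
#   infer_gib(roc_auc_score)       # -> True, a higher AUC is better
#   infer_gib(mean_absolute_error) # -> False, a lower error is better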
def infer_gib_multiclass(metric: Callable) -> bool:
"""Infer greater is better from metric.
Args:
metric: Metric function. It must take two
arguments y_true, y_pred.
Returns:
        ```True``` if greater is better.
Raises:
AssertionError: If there is no way to order the predictions.
"""
label = np.array([0, 1, 2])
pred = np.array([[0.9, 0.05, 0.05], [0.05, 0.9, 0.05], [0.05, 0.05, 0.9]])
g_val = metric(label, pred)
b_val = metric(label, pred[::-1])
assert g_val != b_val, (
"Cannot infer greater is better from metric. " "Should be set manually."
)
return g_val > b_val
| 21.694915
| 80
| 0.590625
|
463ece0f7be59780e121719edd7c6b09d8a810d7
| 6,639
|
py
|
Python
|
pygks/reg_gng.py
|
sbxzy/pygks_package
|
9e2c4910ee0eb83e6fa710f97aa39dde285bc761
|
[
"BSD-3-Clause"
] | null | null | null |
pygks/reg_gng.py
|
sbxzy/pygks_package
|
9e2c4910ee0eb83e6fa710f97aa39dde285bc761
|
[
"BSD-3-Clause"
] | null | null | null |
pygks/reg_gng.py
|
sbxzy/pygks_package
|
9e2c4910ee0eb83e6fa710f97aa39dde285bc761
|
[
"BSD-3-Clause"
] | null | null | null |
"""GNG regressor"""
from . import gng2
from .kde import density
from .__gaussian_custom import norm_pdf_multivariate
from numpy import array,diag,matrix
import time
from pygraph.classes.graph import graph
from pygraph.algorithms.accessibility import connected_components
import itertools
from .gks import GKS
from copy import deepcopy
class GNGregressor:
"""Regression interface based on SSL-GKS and GNG. smooth can be set to None or real number, normally falls in [-1,0]. If set to None, SSL will be employed to estimate its value.
response_dimension is integer, means the number of response variables.
K is integer which is the number of neurons for kernel smoothing, larger K means little details but more smoothed predictions.
The rest of the parameters are GNG training parameters."""
Pis = [] #:Distribution of the neuron populations.
bands = [] #:Bandwidth for visualization.
nodes = [] #:Weights of the neurons.
sigmax = []
ux = []
uy = []
gr = [] #:Topology structure of neurons.
counts = 0
standard_deviation = 0
smooth = -0.4 #:Smooth parameter for kernel smoothing, if set to None, SSL smooth parameter selection will be employed.
reg_model = None
__res_dimension = 1
__global = False
__gng_parameter_list = []
K = 10 #:Number of neurons selected for kernel smoothing.
def __init__(self,smooth = None,response_dimension = 1,K=10,age_max = 100,nn_lambda = 50,ann = 0.5,bnn = 0.0005,eb = 0.05,en = 0.0006):
gng2.set_parameter(age_max,nn_lambda,ann,bnn,eb,en)
self.__gng_parameter_list = [age_max,nn_lambda,ann,bnn,eb,en]
self.smooth = smooth
self.__res_dimension = 1
self.K = K
def fit(self, X, y):
"""X is array or list, each element is numpy array. Y is array or list containing the response varaible values."""
#print 'training with bandwidth calculation, please wait...'
timecost = time.time()
t = 0
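        # The loop below keeps running estimates of E[X] and E[X^2] with an
        # incremental (online) update, so the per-feature standard deviation
        # can be derived afterwards without storing the whole sample.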
for i in range(len(y)):
n_point = array(list(X[i]) + list([y[i]]))
if t == 0:
EX = n_point
EX2 = n_point ** 2
else:
count = float(t)
EX = (EX*count/(count + 1.0)) + (n_point/(count + 1.0))
EX2 = (EX2*count/(count + 1.0)) + ((n_point ** 2)/(count + 1.0))
t += 1
gng2.step(n_point,0,t)
gng2.step(array([]),0,-1)
#print 'time cost',time.time() - timecost
standard_deviation = (EX2 - EX ** 2) ** 0.5
self.standard_deviation = standard_deviation
if self.smooth == None:
self.bands = standard_deviation * (len(gng2.setN) ** (-0.2))
else:
self.bands = standard_deviation * (len(gng2.setN) ** (self.smooth))
Pis = gng2.accumulated
self.counts = gng2.accumulated
self.Pis = array(Pis) / float(sum(array(Pis)))#distribution of the clusters
self.nodes = deepcopy(gng2.setN)
self.sigmax = matrix(diag(array(self.bands)[0:-1]**2))
for each in self.nodes:
self.ux.append(each[0:-1])
self.uy.append(each[-1])
self.uy = array(self.uy)
self.gr = gng2.gr
self.reg_model = GKS(self.nodes, self.counts, standard_deviation**2, self.__res_dimension, self.smooth, self.K)
def predict(self, data):
"""This method returns the predictions the variable data. data should be within the same data space to X in the fit method. When smooth parameter is set to None, an SSL
procedure will be employed to estimate it."""
if self.smooth == None:
gng2.set_parameter(self.__gng_parameter_list[0],self.__gng_parameter_list[1],self.__gng_parameter_list[2],self.__gng_parameter_list[3],self.__gng_parameter_list[4],self.__gng_parameter_list[5])
t = 0
for i in range(len(data)):
n_point = array(data[i])
if t == 0:
EX = n_point
EX2 = n_point ** 2
else:
count = float(t)
EX = (EX*count/(count + 1.0)) + (n_point/(count + 1.0))
EX2 = (EX2*count/(count + 1.0)) + ((n_point ** 2)/(count + 1.0))
t += 1
gng2.step(n_point,0,t)
gng2.step(array([]),0,-1)
return self.reg_model.responses(data, gng2.setN)
else:
return self.reg_model.responses(data)
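    # Hedged usage sketch (mirrors the __main__ block at the bottom of this file):
    #   reg = GNGregressor(smooth=None, K=10)
    #   reg.fit(X, y)
    #   y_hat = reg.predict(X_test)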
def draw_density(self, resolution = 0.05):
"""Draws the density contour of any regressor instance. It can only be called after calling the fit
        method, and only works in the 2d case. resolution is a positive real number defining the detail level of the drawing.
A smaller resolution number will generate more detailed drawings."""
from numpy import mgrid,zeros
from copy import deepcopy
the_d = density(self.nodes,array(self.counts),self.standard_deviation)
dx, dy = resolution, resolution
# generate 2 2d grids for the x & y bounds
y, x = mgrid[slice(0, 1 + dy, dy),slice(0, 1 + dx, dx)]
t=deepcopy(x[0])
z = zeros(shape = (len(x[0]),len(y[0])))
z1= zeros(shape = (len(x[0]),len(y[0])))
print('Please wait...')
for i in range(len(t)):
for j in range(len(t)):
input_point = array([t[i],t[j]])
z[j][i] = the_d.estimate(input_point)
if not ((input_point - array([0.5,0.2])).any()):
print(i,j)
print('drawing...')
import matplotlib.pyplot as plt
from matplotlib.colors import BoundaryNorm
from matplotlib.ticker import MaxNLocator
z = z[:-1, :-1]
        levels = MaxNLocator(nbins=15).tick_values(z.min(), z.max())
cmap = plt.get_cmap('PiYG')
plt.contourf(x[:-1, :-1] + dx / 2., y[:-1, :-1] + dy / 2., z, levels=levels, cmap=cmap)
plt.colorbar()
plt.title('Density estimation by GNG')
plt.show()
if __name__ == '__main__':
from xzyutil.csv_reader import csv_reader
r = csv_reader('reg_intro.csv')
X,y = r.seperate_label()
the_reg = GNGregressor(smooth = None, K = 10)
the_reg.fit(X,y)
# the_reg.draw_density()
test_x = []
draw_x = []
for i in range(50):
test_x.append(array([i/50.0]))
draw_x.append(i/50.0)
test_y = the_reg.predict(test_x)
import matplotlib.pyplot as plt
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(draw_x,test_y,'k-')
plt.axis('off')
plt.show()
| 42.832258
| 205
| 0.595421
|
63826b89c4d07d4a48754b219322427c80da2fe5
| 430
|
py
|
Python
|
real-world-examples/turtle-examples/square-circle-loop-turtle.py
|
sumanchary86/learning-python
|
99ae9c31d62a07d1363b67f22f93173730346d76
|
[
"MIT"
] | 19
|
2019-08-30T06:51:52.000Z
|
2022-03-11T18:44:29.000Z
|
real-world-examples/turtle-examples/square-circle-loop-turtle.py
|
jassics/learning-python
|
b8865126e96ae79c45759186573e84d4eb11bde9
|
[
"MIT"
] | 9
|
2020-02-14T09:21:20.000Z
|
2022-03-08T09:38:09.000Z
|
real-world-examples/turtle-examples/square-circle-loop-turtle.py
|
sumanchary86/learning-python
|
99ae9c31d62a07d1363b67f22f93173730346d76
|
[
"MIT"
] | 12
|
2020-07-20T18:49:45.000Z
|
2021-12-18T11:20:03.000Z
|
import turtle
def draw_square(some_turtle_name):
for i in range(1,5):
some_turtle_name.forward(100)
some_turtle_name.right(90)
def draw_art():
window = turtle.Screen()
window.bgcolor('red')
brad = turtle.Turtle()
brad.shape('turtle')
brad.color('yellow')
brad.speed(2)
for i in range(1,37):
draw_square(brad)
brad.right(10)
window.exitonclick()
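# Descriptive note: draw_art() draws 36 squares, turning 10 degrees after each one,
# so the squares sweep a full 360-degree circle around the starting point.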
draw_art()
| 17.916667
| 37
| 0.627907
|
9629a08d978eae11d011f36573f5fd9244635535
| 183,901
|
py
|
Python
|
src/azure-cli/azure/cli/command_modules/acs/custom.py
|
istavrinides/azure-cli
|
fd310add0dfe78ae568d38ba85748c51fd83556c
|
[
"MIT"
] | 1
|
2021-05-03T21:33:51.000Z
|
2021-05-03T21:33:51.000Z
|
src/azure-cli/azure/cli/command_modules/acs/custom.py
|
istavrinides/azure-cli
|
fd310add0dfe78ae568d38ba85748c51fd83556c
|
[
"MIT"
] | null | null | null |
src/azure-cli/azure/cli/command_modules/acs/custom.py
|
istavrinides/azure-cli
|
fd310add0dfe78ae568d38ba85748c51fd83556c
|
[
"MIT"
] | 1
|
2021-03-02T09:26:15.000Z
|
2021-03-02T09:26:15.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from __future__ import print_function
import binascii
import datetime
import errno
import json
import os
import os.path
import platform
import random
import re
import shutil
import ssl
import stat
import string
import subprocess
import sys
import tempfile
import threading
import time
import uuid
import webbrowser
from distutils.version import StrictVersion
from math import isnan
from six.moves.urllib.request import urlopen # pylint: disable=import-error
from six.moves.urllib.error import URLError # pylint: disable=import-error
# pylint: disable=import-error
import yaml
import dateutil.parser
from dateutil.relativedelta import relativedelta
from knack.log import get_logger
from knack.util import CLIError
from knack.prompting import prompt_pass, NoTTYException, prompt_y_n
from msrestazure.azure_exceptions import CloudError
import requests
# pylint: disable=no-name-in-module,import-error
from azure.cli.command_modules.acs import acs_client, proxy
from azure.cli.command_modules.acs._params import regions_in_preview, regions_in_prod
from azure.cli.core.api import get_config_dir
from azure.cli.core.azclierror import (ResourceNotFoundError,
ArgumentUsageError,
ClientRequestError,
InvalidArgumentValueError,
ValidationError)
from azure.cli.core._profile import Profile
from azure.cli.core.commands.client_factory import get_mgmt_service_client, get_subscription_id
from azure.cli.core.keys import is_valid_ssh_rsa_public_key
from azure.cli.core.util import in_cloud_console, shell_safe_json_parse, truncate_text, sdk_no_wait
from azure.cli.core.commands import LongRunningOperation
from azure.graphrbac.models import (ApplicationCreateParameters,
ApplicationUpdateParameters,
PasswordCredential,
KeyCredential,
ServicePrincipalCreateParameters,
GetObjectsParameters,
ResourceAccess, RequiredResourceAccess)
from azure.mgmt.containerservice.models import ContainerServiceOrchestratorTypes
from azure.mgmt.containerservice.v2020_09_01.models import ContainerServiceNetworkProfile
from azure.mgmt.containerservice.v2020_09_01.models import ContainerServiceLinuxProfile
from azure.mgmt.containerservice.v2020_09_01.models import ManagedClusterServicePrincipalProfile
from azure.mgmt.containerservice.v2020_09_01.models import ContainerServiceSshConfiguration
from azure.mgmt.containerservice.v2020_09_01.models import ContainerServiceSshPublicKey
from azure.mgmt.containerservice.v2020_09_01.models import ManagedCluster
from azure.mgmt.containerservice.v2020_09_01.models import ManagedClusterAADProfile
from azure.mgmt.containerservice.v2020_09_01.models import ManagedClusterAddonProfile
from azure.mgmt.containerservice.v2020_09_01.models import ManagedClusterAgentPoolProfile
from azure.mgmt.containerservice.v2020_09_01.models import ManagedClusterIdentity
from azure.mgmt.containerservice.v2020_09_01.models import AgentPool
from azure.mgmt.containerservice.v2020_09_01.models import AgentPoolUpgradeSettings
from azure.mgmt.containerservice.v2020_09_01.models import ManagedClusterSKU
from azure.mgmt.containerservice.v2020_09_01.models import ManagedClusterWindowsProfile
from azure.mgmt.containerservice.v2020_09_01.models import ManagedClusterIdentityUserAssignedIdentitiesValue
from azure.mgmt.containerservice.v2019_09_30_preview.models import OpenShiftManagedClusterAgentPoolProfile
from azure.mgmt.containerservice.v2019_09_30_preview.models import OpenShiftAgentPoolProfileRole
from azure.mgmt.containerservice.v2019_09_30_preview.models import OpenShiftManagedClusterIdentityProvider
from azure.mgmt.containerservice.v2019_09_30_preview.models import OpenShiftManagedClusterAADIdentityProvider
from azure.mgmt.containerservice.v2019_09_30_preview.models import OpenShiftManagedCluster
from azure.mgmt.containerservice.v2019_09_30_preview.models import OpenShiftRouterProfile
from azure.mgmt.containerservice.v2019_09_30_preview.models import OpenShiftManagedClusterAuthProfile
from azure.mgmt.containerservice.v2019_09_30_preview.models import NetworkProfile
from azure.mgmt.containerservice.v2019_09_30_preview.models import OpenShiftManagedClusterMonitorProfile
from ._client_factory import cf_container_services
from ._client_factory import cf_resource_groups
from ._client_factory import get_auth_management_client
from ._client_factory import get_graph_rbac_management_client
from ._client_factory import cf_resources
from ._client_factory import get_resource_by_name
from ._client_factory import cf_container_registry_service
from ._client_factory import cf_managed_clusters
from ._client_factory import get_msi_client
from ._helpers import (_populate_api_server_access_profile, _set_vm_set_type, _set_outbound_type,
_parse_comma_separated_list)
from ._loadbalancer import (set_load_balancer_sku, is_load_balancer_profile_provided,
update_load_balancer_profile, create_load_balancer_profile)
from ._consts import CONST_SCALE_SET_PRIORITY_REGULAR, CONST_SCALE_SET_PRIORITY_SPOT, CONST_SPOT_EVICTION_POLICY_DELETE
from ._consts import CONST_HTTP_APPLICATION_ROUTING_ADDON_NAME
from ._consts import CONST_MONITORING_ADDON_NAME
from ._consts import CONST_MONITORING_LOG_ANALYTICS_WORKSPACE_RESOURCE_ID
from ._consts import CONST_VIRTUAL_NODE_ADDON_NAME
from ._consts import CONST_VIRTUAL_NODE_SUBNET_NAME
from ._consts import CONST_KUBE_DASHBOARD_ADDON_NAME
from ._consts import CONST_AZURE_POLICY_ADDON_NAME
from ._consts import CONST_INGRESS_APPGW_ADDON_NAME
from ._consts import CONST_INGRESS_APPGW_APPLICATION_GATEWAY_ID, CONST_INGRESS_APPGW_APPLICATION_GATEWAY_NAME
from ._consts import CONST_INGRESS_APPGW_SUBNET_CIDR, CONST_INGRESS_APPGW_SUBNET_ID
from ._consts import CONST_INGRESS_APPGW_WATCH_NAMESPACE
from ._consts import ADDONS
from ._consts import CONST_CANIPULL_IMAGE
logger = get_logger(__name__)
# pylint:disable=too-many-lines,unused-argument
def which(binary):
path_var = os.getenv('PATH')
if platform.system() == 'Windows':
binary = binary + '.exe'
parts = path_var.split(';')
else:
parts = path_var.split(':')
for part in parts:
bin_path = os.path.join(part, binary)
if os.path.exists(bin_path) and os.path.isfile(bin_path) and os.access(bin_path, os.X_OK):
return bin_path
return None
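# Illustrative example: which('kubectl') returns the full path to the kubectl
# executable if it is found on PATH (with '.exe' appended on Windows), else None.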
def wait_then_open(url):
"""
    Waits for a bit then opens a URL. Useful for waiting for a proxy to come up, and then opening the URL.
"""
    for _ in range(1, 10):
        try:
            urlopen(url, context=_ssl_context())
            break
        except URLError:
            time.sleep(1)
webbrowser.open_new_tab(url)
def wait_then_open_async(url):
"""
Spawns a thread that waits for a bit then opens a URL.
"""
    t = threading.Thread(target=wait_then_open, args=(url,))
t.daemon = True
t.start()
def acs_browse(cmd, client, resource_group_name, name, disable_browser=False, ssh_key_file=None):
"""
Opens a browser to the web interface for the cluster orchestrator
:param name: Name of the target Azure container service instance.
:type name: String
:param resource_group_name: Name of Azure container service's resource group.
:type resource_group_name: String
    :param disable_browser: If true, don't launch a web browser after establishing the proxy
:type disable_browser: bool
:param ssh_key_file: If set a path to an SSH key to use, only applies to DCOS
:type ssh_key_file: string
"""
acs_info = _get_acs_info(cmd.cli_ctx, name, resource_group_name)
_acs_browse_internal(cmd, client, acs_info, resource_group_name, name, disable_browser, ssh_key_file)
def _acs_browse_internal(cmd, client, acs_info, resource_group_name, name, disable_browser, ssh_key_file):
orchestrator_type = acs_info.orchestrator_profile.orchestrator_type # pylint: disable=no-member
if str(orchestrator_type).lower() == 'kubernetes' or \
orchestrator_type == ContainerServiceOrchestratorTypes.kubernetes or \
(acs_info.custom_profile and acs_info.custom_profile.orchestrator == 'kubernetes'): # pylint: disable=no-member
return k8s_browse(cmd, client, name, resource_group_name, disable_browser, ssh_key_file=ssh_key_file)
if str(orchestrator_type).lower() == 'dcos' or orchestrator_type == ContainerServiceOrchestratorTypes.dcos:
return _dcos_browse_internal(acs_info, disable_browser, ssh_key_file)
raise CLIError('Unsupported orchestrator type {} for browse'.format(orchestrator_type))
def k8s_browse(cmd, client, name, resource_group_name, disable_browser=False, ssh_key_file=None):
"""
Launch a proxy and browse the Kubernetes web UI.
    :param disable_browser: If true, don't launch a web browser after establishing the proxy
:type disable_browser: bool
"""
acs_info = _get_acs_info(cmd.cli_ctx, name, resource_group_name)
_k8s_browse_internal(name, acs_info, disable_browser, ssh_key_file)
def _k8s_browse_internal(name, acs_info, disable_browser, ssh_key_file):
if not which('kubectl'):
raise CLIError('Can not find kubectl executable in PATH')
browse_path = os.path.join(get_config_dir(), 'acsBrowseConfig.yaml')
if os.path.exists(browse_path):
os.remove(browse_path)
_k8s_get_credentials_internal(name, acs_info, browse_path, ssh_key_file, False)
logger.warning('Proxy running on 127.0.0.1:8001/ui')
logger.warning('Press CTRL+C to close the tunnel...')
if not disable_browser:
wait_then_open_async('http://127.0.0.1:8001/ui')
subprocess.call(["kubectl", "--kubeconfig", browse_path, "proxy"])
def dcos_browse(cmd, client, name, resource_group_name, disable_browser=False, ssh_key_file=None):
"""
Creates an SSH tunnel to the Azure container service, and opens the Mesosphere DC/OS dashboard in the browser.
    :param name: Name of the target Azure container service instance.
:type name: String
:param resource_group_name: Name of Azure container service's resource group.
:type resource_group_name: String
    :param disable_browser: If true, don't launch a web browser after establishing the proxy
:type disable_browser: bool
:param ssh_key_file: Path to the SSH key to use
:type ssh_key_file: string
"""
acs_info = _get_acs_info(cmd.cli_ctx, name, resource_group_name)
_dcos_browse_internal(acs_info, disable_browser, ssh_key_file)
def _dcos_browse_internal(acs_info, disable_browser, ssh_key_file):
if not os.path.isfile(ssh_key_file):
raise CLIError('Private key file {} does not exist'.format(ssh_key_file))
acs = acs_client.ACSClient()
if not acs.connect(_get_host_name(acs_info), _get_username(acs_info),
key_filename=ssh_key_file):
raise CLIError('Error connecting to ACS: {}'.format(_get_host_name(acs_info)))
octarine_bin = '/opt/mesosphere/bin/octarine'
if not acs.file_exists(octarine_bin):
raise CLIError('Proxy server ({}) does not exist on the cluster.'.format(octarine_bin))
proxy_id = _rand_str(16)
proxy_cmd = '{} {}'.format(octarine_bin, proxy_id)
acs.run(proxy_cmd, background=True)
# Parse the output to get the remote PORT
proxy_client_cmd = '{} --client --port {}'.format(octarine_bin, proxy_id)
stdout, _ = acs.run(proxy_client_cmd)
remote_port = int(stdout.read().decode().strip())
local_port = acs.get_available_local_port()
# Set the proxy
proxy.set_http_proxy('127.0.0.1', local_port)
logger.warning('Proxy running on 127.0.0.1:%s', local_port)
logger.warning('Press CTRL+C to close the tunnel...')
if not disable_browser:
wait_then_open_async('http://127.0.0.1')
try:
acs.create_tunnel(
remote_host='127.0.0.1',
remote_port=remote_port,
local_port=local_port)
finally:
proxy.disable_http_proxy()
def acs_install_cli(cmd, client, resource_group_name, name, install_location=None, client_version=None):
acs_info = _get_acs_info(cmd.cli_ctx, name, resource_group_name)
orchestrator_type = acs_info.orchestrator_profile.orchestrator_type # pylint: disable=no-member
kwargs = {'install_location': install_location}
if client_version:
kwargs['client_version'] = client_version
if orchestrator_type == 'kubernetes':
return k8s_install_cli(**kwargs)
if orchestrator_type == 'dcos':
return dcos_install_cli(**kwargs)
raise CLIError('Unsupported orchestrator type {} for install-cli'.format(orchestrator_type))
def _ssl_context():
if sys.version_info < (3, 4) or (in_cloud_console() and platform.system() == 'Windows'):
try:
return ssl.SSLContext(ssl.PROTOCOL_TLS) # added in python 2.7.13 and 3.6
except AttributeError:
return ssl.SSLContext(ssl.PROTOCOL_TLSv1)
return ssl.create_default_context()
def _urlretrieve(url, filename):
req = urlopen(url, context=_ssl_context())
with open(filename, "wb") as f:
f.write(req.read())
def _unzip(src, dest):
logger.debug('Extracting %s to %s.', src, dest)
system = platform.system()
if system in ('Linux', 'Darwin', 'Windows'):
import zipfile
with zipfile.ZipFile(src, 'r') as zipObj:
zipObj.extractall(dest)
else:
raise CLIError('The current system is not supported.')
def dcos_install_cli(cmd, install_location=None, client_version='1.8'):
"""
Downloads the dcos command line from Mesosphere
"""
system = platform.system()
if not install_location:
raise CLIError(
"No install location specified and it could not be determined from the current platform '{}'".format(
system))
base_url = 'https://downloads.dcos.io/binaries/cli/{}/x86-64/dcos-{}/{}'
if system == 'Windows':
file_url = base_url.format('windows', client_version, 'dcos.exe')
elif system == 'Linux':
# TODO Support ARM CPU here
file_url = base_url.format('linux', client_version, 'dcos')
elif system == 'Darwin':
file_url = base_url.format('darwin', client_version, 'dcos')
else:
        raise CLIError("The current system ('{}') is not supported.".format(system))
logger.warning('Downloading client to %s', install_location)
try:
_urlretrieve(file_url, install_location)
os.chmod(install_location,
os.stat(install_location).st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
except IOError as err:
raise CLIError('Connection error while attempting to download client ({})'.format(err))
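# Example (illustrative): with the default client_version of '1.8' on Linux, the download
# URL built above resolves to
# https://downloads.dcos.io/binaries/cli/linux/x86-64/dcos-1.8/dcos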
def k8s_install_cli(cmd, client_version='latest', install_location=None, base_src_url=None,
kubelogin_version='latest', kubelogin_install_location=None,
kubelogin_base_src_url=None):
k8s_install_kubectl(cmd, client_version, install_location, base_src_url)
k8s_install_kubelogin(cmd, kubelogin_version, kubelogin_install_location, kubelogin_base_src_url)
def k8s_install_kubectl(cmd, client_version='latest', install_location=None, source_url=None):
"""
Install kubectl, a command-line interface for Kubernetes clusters.
"""
if not source_url:
source_url = "https://storage.googleapis.com/kubernetes-release/release"
cloud_name = cmd.cli_ctx.cloud.name
if cloud_name.lower() == 'azurechinacloud':
source_url = 'https://mirror.azure.cn/kubernetes/kubectl'
if client_version == 'latest':
context = _ssl_context()
version = urlopen(source_url + '/stable.txt', context=context).read()
client_version = version.decode('UTF-8').strip()
else:
client_version = "v%s" % client_version
file_url = ''
system = platform.system()
base_url = source_url + '/{}/bin/{}/amd64/{}'
# ensure installation directory exists
install_dir, cli = os.path.dirname(install_location), os.path.basename(install_location)
if not os.path.exists(install_dir):
os.makedirs(install_dir)
if system == 'Windows':
file_url = base_url.format(client_version, 'windows', 'kubectl.exe')
elif system == 'Linux':
# TODO: Support ARM CPU here
file_url = base_url.format(client_version, 'linux', 'kubectl')
elif system == 'Darwin':
file_url = base_url.format(client_version, 'darwin', 'kubectl')
else:
        raise CLIError("The current system ('{}') is not supported.".format(system))
logger.warning('Downloading client to "%s" from "%s"', install_location, file_url)
try:
_urlretrieve(file_url, install_location)
os.chmod(install_location,
os.stat(install_location).st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
except IOError as ex:
raise CLIError('Connection error while attempting to download client ({})'.format(ex))
if system == 'Windows': # be verbose, as the install_location likely not in Windows's search PATHs
env_paths = os.environ['PATH'].split(';')
found = next((x for x in env_paths if x.lower().rstrip('\\') == install_dir.lower()), None)
if not found:
# pylint: disable=logging-format-interpolation
logger.warning('Please add "{0}" to your search PATH so the `{1}` can be found. 2 options: \n'
' 1. Run "set PATH=%PATH%;{0}" or "$env:path += \'{0}\'" for PowerShell. '
'This is good for the current command session.\n'
' 2. Update system PATH environment variable by following '
'"Control Panel->System->Advanced->Environment Variables", and re-open the command window. '
'You only need to do it once'.format(install_dir, cli))
else:
logger.warning('Please ensure that %s is in your search PATH, so the `%s` command can be found.',
install_dir, cli)
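# Example (illustrative): for a hypothetical client_version of 'v1.20.0' on Linux, the
# kubectl download URL built above resolves to
# https://storage.googleapis.com/kubernetes-release/release/v1.20.0/bin/linux/amd64/kubectl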
def k8s_install_kubelogin(cmd, client_version='latest', install_location=None, source_url=None):
"""
    Install kubelogin, a client-go credential (exec) plugin implementing Azure authentication.
"""
cloud_name = cmd.cli_ctx.cloud.name
if not source_url:
source_url = 'https://github.com/Azure/kubelogin/releases/download'
if cloud_name.lower() == 'azurechinacloud':
source_url = 'https://mirror.azure.cn/kubernetes/kubelogin'
if client_version == 'latest':
context = _ssl_context()
latest_release_url = 'https://api.github.com/repos/Azure/kubelogin/releases/latest'
if cloud_name.lower() == 'azurechinacloud':
latest_release_url = 'https://mirror.azure.cn/kubernetes/kubelogin/latest'
latest_release = urlopen(latest_release_url, context=context).read()
client_version = json.loads(latest_release)['tag_name'].strip()
else:
client_version = "v%s" % client_version
base_url = source_url + '/{}/kubelogin.zip'
file_url = base_url.format(client_version)
# ensure installation directory exists
install_dir, cli = os.path.dirname(install_location), os.path.basename(install_location)
if not os.path.exists(install_dir):
os.makedirs(install_dir)
system = platform.system()
if system == 'Windows':
sub_dir, binary_name = 'windows_amd64', 'kubelogin.exe'
elif system == 'Linux':
# TODO: Support ARM CPU here
sub_dir, binary_name = 'linux_amd64', 'kubelogin'
elif system == 'Darwin':
sub_dir, binary_name = 'darwin_amd64', 'kubelogin'
else:
        raise CLIError("The current system ('{}') is not supported.".format(system))
with tempfile.TemporaryDirectory() as tmp_dir:
try:
download_path = os.path.join(tmp_dir, 'kubelogin.zip')
logger.warning('Downloading client to "%s" from "%s"', download_path, file_url)
_urlretrieve(file_url, download_path)
except IOError as ex:
raise CLIError('Connection error while attempting to download client ({})'.format(ex))
_unzip(download_path, tmp_dir)
download_path = os.path.join(tmp_dir, 'bin', sub_dir, binary_name)
shutil.move(download_path, install_location)
os.chmod(install_location, os.stat(install_location).st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
if system == 'Windows': # be verbose, as the install_location likely not in Windows's search PATHs
env_paths = os.environ['PATH'].split(';')
found = next((x for x in env_paths if x.lower().rstrip('\\') == install_dir.lower()), None)
if not found:
# pylint: disable=logging-format-interpolation
logger.warning('Please add "{0}" to your search PATH so the `{1}` can be found. 2 options: \n'
' 1. Run "set PATH=%PATH%;{0}" or "$env:path += \'{0}\'" for PowerShell. '
'This is good for the current command session.\n'
' 2. Update system PATH environment variable by following '
'"Control Panel->System->Advanced->Environment Variables", and re-open the command window. '
'You only need to do it once'.format(install_dir, cli))
else:
logger.warning('Please ensure that %s is in your search PATH, so the `%s` command can be found.',
install_dir, cli)
def _build_service_principal(rbac_client, cli_ctx, name, url, client_secret):
# use get_progress_controller
hook = cli_ctx.get_progress_controller(True)
    hook.add(message='Creating service principal', value=0, total_val=1.0)
logger.info('Creating service principal')
# always create application with 5 years expiration
start_date = datetime.datetime.utcnow()
end_date = start_date + relativedelta(years=5)
result, aad_session_key = create_application(rbac_client.applications, name, url, [url], password=client_secret,
start_date=start_date, end_date=end_date)
service_principal = result.app_id # pylint: disable=no-member
for x in range(0, 10):
hook.add(message='Creating service principal', value=0.1 * x, total_val=1.0)
try:
create_service_principal(cli_ctx, service_principal, rbac_client=rbac_client)
break
# TODO figure out what exception AAD throws here sometimes.
except Exception as ex: # pylint: disable=broad-except
logger.info(ex)
time.sleep(2 + 2 * x)
else:
return False, aad_session_key
hook.add(message='Finished service principal creation', value=1.0, total_val=1.0)
logger.info('Finished service principal creation')
return service_principal, aad_session_key
def _add_role_assignment(cli_ctx, role, service_principal_msi_id, is_service_principal=True, delay=2, scope=None):
# AAD can have delays in propagating data, so sleep and retry
hook = cli_ctx.get_progress_controller(True)
hook.add(message='Waiting for AAD role to propagate', value=0, total_val=1.0)
logger.info('Waiting for AAD role to propagate')
for x in range(0, 10):
hook.add(message='Waiting for AAD role to propagate', value=0.1 * x, total_val=1.0)
try:
# TODO: break this out into a shared utility library
create_role_assignment(cli_ctx, role, service_principal_msi_id, is_service_principal, scope=scope)
break
except CloudError as ex:
if ex.message == 'The role assignment already exists.':
break
logger.info(ex.message)
except: # pylint: disable=bare-except
pass
time.sleep(delay + delay * x)
else:
return False
hook.add(message='AAD role propagation done', value=1.0, total_val=1.0)
logger.info('AAD role propagation done')
return True
def delete_role_assignments(cli_ctx, ids=None, assignee=None, role=None, resource_group_name=None,
scope=None, include_inherited=False, yes=None):
factory = get_auth_management_client(cli_ctx, scope)
assignments_client = factory.role_assignments
definitions_client = factory.role_definitions
ids = ids or []
if ids:
if assignee or role or resource_group_name or scope or include_inherited:
raise CLIError('When assignment ids are used, other parameter values are not required')
for i in ids:
assignments_client.delete_by_id(i)
return
    if not any([ids, assignee, role, resource_group_name, scope, yes]):
msg = 'This will delete all role assignments under the subscription. Are you sure?'
if not prompt_y_n(msg, default="n"):
return
scope = _build_role_scope(resource_group_name, scope,
assignments_client.config.subscription_id)
assignments = _search_role_assignments(cli_ctx, assignments_client, definitions_client,
scope, assignee, role, include_inherited,
include_groups=False)
if assignments:
for a in assignments:
assignments_client.delete_by_id(a.id)
def _delete_role_assignments(cli_ctx, role, service_principal, delay=2, scope=None):
# AAD can have delays in propagating data, so sleep and retry
hook = cli_ctx.get_progress_controller(True)
hook.add(message='Waiting for AAD role to delete', value=0, total_val=1.0)
logger.info('Waiting for AAD role to delete')
for x in range(0, 10):
hook.add(message='Waiting for AAD role to delete', value=0.1 * x, total_val=1.0)
try:
delete_role_assignments(cli_ctx,
role=role,
assignee=service_principal,
scope=scope)
break
except CLIError as ex:
raise ex
except CloudError as ex:
logger.info(ex)
time.sleep(delay + delay * x)
else:
return False
hook.add(message='AAD role deletion done', value=1.0, total_val=1.0)
logger.info('AAD role deletion done')
return True
def _search_role_assignments(cli_ctx, assignments_client, definitions_client,
scope, assignee, role, include_inherited, include_groups):
assignee_object_id = None
if assignee:
assignee_object_id = _resolve_object_id(cli_ctx, assignee)
# always use "scope" if provided, so we can get assignments beyond subscription e.g. management groups
if scope:
assignments = list(assignments_client.list_for_scope(
scope=scope, filter='atScope()'))
elif assignee_object_id:
if include_groups:
f = "assignedTo('{}')".format(assignee_object_id)
else:
f = "principalId eq '{}'".format(assignee_object_id)
assignments = list(assignments_client.list(filter=f))
else:
assignments = list(assignments_client.list())
if assignments:
assignments = [a for a in assignments if (
not scope or
include_inherited and re.match(_get_role_property(a, 'scope'), scope, re.I) or
_get_role_property(a, 'scope').lower() == scope.lower()
)]
if role:
role_id = _resolve_role_id(role, scope, definitions_client)
assignments = [i for i in assignments if _get_role_property(
i, 'role_definition_id') == role_id]
if assignee_object_id:
assignments = [i for i in assignments if _get_role_property(
i, 'principal_id') == assignee_object_id]
return assignments
def _get_role_property(obj, property_name):
if isinstance(obj, dict):
return obj[property_name]
return getattr(obj, property_name)
def _get_default_dns_prefix(name, resource_group_name, subscription_id):
# Use subscription id to provide uniqueness and prevent DNS name clashes
name_part = re.sub('[^A-Za-z0-9-]', '', name)[0:10]
if not name_part[0].isalpha():
name_part = (str('a') + name_part)[0:10]
resource_group_part = re.sub('[^A-Za-z0-9-]', '', resource_group_name)[0:16]
return '{}-{}-{}'.format(name_part, resource_group_part, subscription_id[0:6])
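# Example (illustrative, placeholder subscription id): _get_default_dns_prefix('myCluster!',
# 'my-rg', '12345678-0000-0000-0000-000000000000') returns 'myCluster-my-rg-123456'.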
def list_acs_locations(cmd, client):
return {
"productionRegions": regions_in_prod,
"previewRegions": regions_in_preview
}
def _generate_windows_profile(windows, admin_username, admin_password):
if windows:
if not admin_password:
raise CLIError('--admin-password is required.')
if len(admin_password) < 6:
raise CLIError('--admin-password must be at least 6 characters')
windows_profile = {
"adminUsername": admin_username,
"adminPassword": admin_password,
}
return windows_profile
return None
def _generate_master_pool_profile(api_version, master_profile, master_count, dns_name_prefix,
master_vm_size, master_osdisk_size, master_vnet_subnet_id,
master_first_consecutive_static_ip, master_storage_profile):
master_pool_profile = {}
default_master_pool_profile = {
"count": int(master_count),
"dnsPrefix": dns_name_prefix + 'mgmt',
}
if api_version == "2017-07-01":
default_master_pool_profile = _update_dict(default_master_pool_profile, {
"count": int(master_count),
"dnsPrefix": dns_name_prefix + 'mgmt',
"vmSize": master_vm_size,
"osDiskSizeGB": int(master_osdisk_size),
"vnetSubnetID": master_vnet_subnet_id,
"firstConsecutiveStaticIP": master_first_consecutive_static_ip,
"storageProfile": master_storage_profile,
})
if not master_profile:
master_pool_profile = default_master_pool_profile
else:
master_pool_profile = _update_dict(default_master_pool_profile, master_profile)
return master_pool_profile
def _generate_agent_pool_profiles(api_version, agent_profiles, agent_count, dns_name_prefix,
agent_vm_size, os_type, agent_osdisk_size, agent_vnet_subnet_id,
agent_ports, agent_storage_profile):
agent_pool_profiles = []
default_agent_pool_profile = {
"count": int(agent_count),
"vmSize": agent_vm_size,
"osType": os_type,
"dnsPrefix": dns_name_prefix + 'agent',
}
if api_version == "2017-07-01":
default_agent_pool_profile = _update_dict(default_agent_pool_profile, {
"count": int(agent_count),
"vmSize": agent_vm_size,
"osDiskSizeGB": int(agent_osdisk_size),
"osType": os_type,
"dnsPrefix": dns_name_prefix + 'agent',
"vnetSubnetID": agent_vnet_subnet_id,
"ports": agent_ports,
"storageProfile": agent_storage_profile,
})
if agent_profiles is None:
agent_pool_profiles.append(_update_dict(default_agent_pool_profile, {"name": "agentpool0"}))
else:
# override agentPoolProfiles by using the passed in agent_profiles
for idx, ap in enumerate(agent_profiles):
# if the user specified dnsPrefix, we honor that
# otherwise, we use the idx to avoid duplicate dns name
a = _update_dict({"dnsPrefix": dns_name_prefix + 'agent' + str(idx)}, ap)
agent_pool_profiles.append(_update_dict(default_agent_pool_profile, a))
return agent_pool_profiles
def _generate_outputs(name, orchestrator_type, admin_username):
# define outputs
outputs = {
"masterFQDN": {
"type": "string",
"value": "[reference(concat('Microsoft.ContainerService/containerServices/', '{}')).masterProfile.fqdn]".format(name) # pylint: disable=line-too-long
},
"sshMaster0": {
"type": "string",
"value": "[concat('ssh ', '{0}', '@', reference(concat('Microsoft.ContainerService/containerServices/', '{1}')).masterProfile.fqdn, ' -A -p 22')]".format(admin_username, name) # pylint: disable=line-too-long
},
}
if orchestrator_type.lower() != "kubernetes":
outputs["agentFQDN"] = {
"type": "string",
"value": "[reference(concat('Microsoft.ContainerService/containerServices/', '{}')).agentPoolProfiles[0].fqdn]".format(name) # pylint: disable=line-too-long
}
# override sshMaster0 for non-kubernetes scenarios
outputs["sshMaster0"] = {
"type": "string",
"value": "[concat('ssh ', '{0}', '@', reference(concat('Microsoft.ContainerService/containerServices/', '{1}')).masterProfile.fqdn, ' -A -p 2200')]".format(admin_username, name) # pylint: disable=line-too-long
}
return outputs
def _generate_properties(api_version, orchestrator_type, orchestrator_version, master_pool_profile,
agent_pool_profiles, ssh_key_value, admin_username, windows_profile):
properties = {
"orchestratorProfile": {
"orchestratorType": orchestrator_type,
},
"masterProfile": master_pool_profile,
"agentPoolProfiles": agent_pool_profiles,
"linuxProfile": {
"ssh": {
"publicKeys": [
{
"keyData": ssh_key_value
}
]
},
"adminUsername": admin_username
},
}
if api_version == "2017-07-01":
properties["orchestratorProfile"]["orchestratorVersion"] = orchestrator_version
if windows_profile is not None:
properties["windowsProfile"] = windows_profile
return properties
def _get_user_assigned_identity_client_id(cli_ctx, resource_id):
msi_client = get_msi_client(cli_ctx)
pattern = '/subscriptions/.*?/resourcegroups/(.*?)/providers/microsoft.managedidentity/userassignedidentities/(.*)'
resource_id = resource_id.lower()
match = re.search(pattern, resource_id)
if match:
resource_group_name = match.group(1)
identity_name = match.group(2)
try:
identity = msi_client.user_assigned_identities.get(resource_group_name=resource_group_name,
resource_name=identity_name)
except CloudError as ex:
if 'was not found' in ex.message:
raise ResourceNotFoundError("Identity {} not found.".format(resource_id))
raise ClientRequestError(ex.message)
return identity.client_id
raise InvalidArgumentValueError("Cannot parse identity name from provided resource id {}.".format(resource_id))
# pylint: disable=too-many-locals
def acs_create(cmd, client, resource_group_name, deployment_name, name, ssh_key_value, dns_name_prefix=None,
location=None, admin_username="azureuser", api_version=None, master_profile=None,
master_vm_size="Standard_D2_v2", master_osdisk_size=0, master_count=1, master_vnet_subnet_id="",
master_first_consecutive_static_ip="10.240.255.5", master_storage_profile="",
agent_profiles=None, agent_vm_size="Standard_D2_v2", agent_osdisk_size=0,
agent_count=3, agent_vnet_subnet_id="", agent_ports=None, agent_storage_profile="",
orchestrator_type="DCOS", orchestrator_version="", service_principal=None, client_secret=None, tags=None,
windows=False, admin_password="", generate_ssh_keys=False, # pylint: disable=unused-argument
validate=False, no_wait=False):
"""Create a new Acs.
:param resource_group_name: The name of the resource group. The name
is case insensitive.
:type resource_group_name: str
:param deployment_name: The name of the deployment.
:type deployment_name: str
:param dns_name_prefix: Sets the Domain name prefix for the cluster.
The concatenation of the domain name and the regionalized DNS zone
make up the fully qualified domain name associated with the public
IP address.
:type dns_name_prefix: str
:param name: Resource name for the container service.
:type name: str
:param ssh_key_value: Configure all linux machines with the SSH RSA
public key string. Your key should include three parts, for example
    'ssh-rsa AAAAB...snip...UcyupgH azureuser@linuxvm'.
:type ssh_key_value: str
:param content_version: If included it must match the ContentVersion
in the template.
:type content_version: str
:param admin_username: User name for the Linux Virtual Machines.
:type admin_username: str
:param api_version: ACS API version to use
:type api_version: str
:param master_profile: MasterProfile used to describe master pool
:type master_profile: dict
:param master_vm_size: The size of master pool Virtual Machine
:type master_vm_size: str
:param master_osdisk_size: The osDisk size in GB of master pool Virtual Machine
:type master_osdisk_size: int
:param master_count: The number of masters for the cluster.
:type master_count: int
:param master_vnet_subnet_id: The vnet subnet id for master pool
:type master_vnet_subnet_id: str
:param master_storage_profile: The storage profile used for master pool.
Possible value could be StorageAccount, ManagedDisk.
:type master_storage_profile: str
:param agent_profiles: AgentPoolProfiles used to describe agent pools
:type agent_profiles: dict
:param agent_vm_size: The size of the Virtual Machine.
:type agent_vm_size: str
:param agent_osdisk_size: The osDisk size in GB of agent pool Virtual Machine
:type agent_osdisk_size: int
    :param agent_vnet_subnet_id: The vnet subnet id for agent pool
:type agent_vnet_subnet_id: str
:param agent_ports: the ports exposed on the agent pool
:type agent_ports: list
:param agent_storage_profile: The storage profile used for agent pool.
Possible value could be StorageAccount, ManagedDisk.
:type agent_storage_profile: str
:param location: Location for VM resources.
:type location: str
:param orchestrator_type: The type of orchestrator used to manage the
applications on the cluster.
:type orchestrator_type: str or :class:`orchestratorType
<Default.models.orchestratorType>`
:param tags: Tags object.
:type tags: object
:param windows: If true, the cluster will be built for running Windows container.
:type windows: bool
    :param admin_password: The administration password for Windows nodes. Only available if --windows=true
:type admin_password: str
:param bool raw: returns the direct response alongside the
deserialized response
:rtype:
:class:`AzureOperationPoller<msrestazure.azure_operation.AzureOperationPoller>`
instance that returns :class:`DeploymentExtended
<Default.models.DeploymentExtended>`
:rtype: :class:`ClientRawResponse<msrest.pipeline.ClientRawResponse>`
if raw=true
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
if ssh_key_value is not None and not is_valid_ssh_rsa_public_key(ssh_key_value):
raise CLIError('Provided ssh key ({}) is invalid or non-existent'.format(ssh_key_value))
subscription_id = get_subscription_id(cmd.cli_ctx)
if not dns_name_prefix:
dns_name_prefix = _get_default_dns_prefix(name, resource_group_name, subscription_id)
rg_location = _get_rg_location(cmd.cli_ctx, resource_group_name)
if location is None:
location = rg_location
# if api-version is not specified, or specified in a version not supported
# override based on location
if api_version is None or api_version not in ["2017-01-31", "2017-07-01"]:
if location in regions_in_preview:
api_version = "2017-07-01" # 2017-07-01 supported in the preview locations
else:
api_version = "2017-01-31" # 2017-01-31 applied to other locations
if orchestrator_type.lower() == 'kubernetes':
principal_obj = _ensure_service_principal(cmd.cli_ctx, service_principal, client_secret, subscription_id,
dns_name_prefix, location, name)
client_secret = principal_obj.get("client_secret")
service_principal = principal_obj.get("service_principal")
elif windows:
raise CLIError('--windows is only supported for Kubernetes clusters')
# set location if void
if not location:
location = '[resourceGroup().location]'
# set os_type
os_type = 'Linux'
if windows:
os_type = 'Windows'
# set agent_ports if void
if not agent_ports:
agent_ports = []
# get windows_profile
windows_profile = _generate_windows_profile(windows, admin_username, admin_password)
# The resources.properties fields should match with ContainerServices' api model
master_pool_profile = _generate_master_pool_profile(api_version, master_profile, master_count, dns_name_prefix,
master_vm_size, master_osdisk_size, master_vnet_subnet_id,
master_first_consecutive_static_ip, master_storage_profile)
agent_pool_profiles = _generate_agent_pool_profiles(api_version, agent_profiles, agent_count, dns_name_prefix,
agent_vm_size, os_type, agent_osdisk_size, agent_vnet_subnet_id,
agent_ports, agent_storage_profile)
outputs = _generate_outputs(name, orchestrator_type, admin_username)
properties = _generate_properties(api_version, orchestrator_type, orchestrator_version, master_pool_profile,
agent_pool_profiles, ssh_key_value, admin_username, windows_profile)
resource = {
"apiVersion": api_version,
"location": location,
"type": "Microsoft.ContainerService/containerServices",
"name": name,
"tags": tags,
"properties": properties,
}
template = {
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"resources": [
resource,
],
"outputs": outputs,
}
params = {}
if service_principal is not None and client_secret is not None:
properties["servicePrincipalProfile"] = {
"clientId": service_principal,
"secret": "[parameters('clientSecret')]",
}
template["parameters"] = {
"clientSecret": {
"type": "secureString",
"metadata": {
"description": "The client secret for the service principal"
}
}
}
params = {
"clientSecret": {
"value": client_secret
}
}
# Due to SPN replication latency, we do a few retries here
max_retry = 30
retry_exception = Exception(None)
for _ in range(0, max_retry):
try:
return _invoke_deployment(cmd, resource_group_name, deployment_name,
template, params, validate, no_wait)
except CloudError as ex:
retry_exception = ex
if 'is not valid according to the validation procedure' in ex.message or \
'The credentials in ServicePrincipalProfile were invalid' in ex.message or \
'not found in Active Directory tenant' in ex.message:
time.sleep(3)
else:
raise ex
raise retry_exception
def store_acs_service_principal(subscription_id, client_secret, service_principal,
file_name='acsServicePrincipal.json'):
obj = {}
if client_secret:
obj['client_secret'] = client_secret
if service_principal:
obj['service_principal'] = service_principal
config_path = os.path.join(get_config_dir(), file_name)
full_config = load_service_principals(config_path=config_path)
if not full_config:
full_config = {}
full_config[subscription_id] = obj
with os.fdopen(os.open(config_path, os.O_RDWR | os.O_CREAT | os.O_TRUNC, 0o600),
'w+') as spFile:
json.dump(full_config, spFile)
def load_acs_service_principal(subscription_id, file_name='acsServicePrincipal.json'):
config_path = os.path.join(get_config_dir(), file_name)
config = load_service_principals(config_path)
if not config:
return None
return config.get(subscription_id)
def load_service_principals(config_path):
if not os.path.exists(config_path):
return None
fd = os.open(config_path, os.O_RDONLY)
try:
with os.fdopen(fd) as f:
return shell_safe_json_parse(f.read())
except: # pylint: disable=bare-except
return None
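# For reference, the acsServicePrincipal.json file written by store_acs_service_principal
# above is keyed by subscription id, e.g. (placeholder values):
# {"<subscription-id>": {"client_secret": "<secret>", "service_principal": "<app-id>"}}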
def _invoke_deployment(cmd, resource_group_name, deployment_name, template, parameters, validate, no_wait,
subscription_id=None):
from azure.cli.core.profiles import ResourceType
DeploymentProperties = cmd.get_models('DeploymentProperties', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES)
properties = DeploymentProperties(template=template, parameters=parameters, mode='incremental')
smc = get_mgmt_service_client(cmd.cli_ctx, ResourceType.MGMT_RESOURCE_RESOURCES,
subscription_id=subscription_id).deployments
if validate:
logger.info('==== BEGIN TEMPLATE ====')
logger.info(json.dumps(template, indent=2))
logger.info('==== END TEMPLATE ====')
if cmd.supported_api_version(min_api='2019-10-01', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES):
Deployment = cmd.get_models('Deployment', resource_type=ResourceType.MGMT_RESOURCE_RESOURCES)
deployment = Deployment(properties=properties)
if validate:
validation_poller = smc.validate(resource_group_name, deployment_name, deployment)
return LongRunningOperation(cmd.cli_ctx)(validation_poller)
return sdk_no_wait(no_wait, smc.create_or_update, resource_group_name, deployment_name, deployment)
if validate:
return smc.validate(resource_group_name, deployment_name, properties)
return sdk_no_wait(no_wait, smc.create_or_update, resource_group_name, deployment_name, properties)
def k8s_get_credentials(cmd, client, name, resource_group_name,
path=os.path.join(os.path.expanduser('~'), '.kube', 'config'),
ssh_key_file=None,
overwrite_existing=False):
"""Download and install kubectl credentials from the cluster master
:param name: The name of the cluster.
:type name: str
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param path: Where to install the kubectl config file
:type path: str
:param ssh_key_file: Path to an SSH key file to use
:type ssh_key_file: str
"""
acs_info = _get_acs_info(cmd.cli_ctx, name, resource_group_name)
_k8s_get_credentials_internal(name, acs_info, path, ssh_key_file, overwrite_existing)
def _k8s_get_credentials_internal(name, acs_info, path, ssh_key_file, overwrite_existing):
if ssh_key_file is not None and not os.path.isfile(ssh_key_file):
raise CLIError('Private key file {} does not exist'.format(ssh_key_file))
dns_prefix = acs_info.master_profile.dns_prefix # pylint: disable=no-member
location = acs_info.location # pylint: disable=no-member
user = acs_info.linux_profile.admin_username # pylint: disable=no-member
_mkdir_p(os.path.dirname(path))
path_candidate = path
ix = 0
while os.path.exists(path_candidate):
ix += 1
path_candidate = '{}-{}-{}'.format(path, name, ix)
# TODO: this only works for public cloud, need other casing for national clouds
acs_client.secure_copy(user, '{}.{}.cloudapp.azure.com'.format(dns_prefix, location),
'.kube/config', path_candidate, key_filename=ssh_key_file)
# merge things
if path_candidate != path:
try:
merge_kubernetes_configurations(path, path_candidate, overwrite_existing)
except yaml.YAMLError as exc:
logger.warning('Failed to merge credentials to kube config file: %s', exc)
logger.warning('The credentials have been saved to %s', path_candidate)
def _handle_merge(existing, addition, key, replace):
if not addition.get(key, False):
return
if not existing.get(key):
existing[key] = addition[key]
return
for i in addition[key]:
for j in existing[key]:
if not i.get('name', False) or not j.get('name', False):
continue
if i['name'] == j['name']:
if replace or i == j:
existing[key].remove(j)
else:
msg = 'A different object named {} already exists in your kubeconfig file.\nOverwrite?'
overwrite = False
try:
overwrite = prompt_y_n(msg.format(i['name']))
except NoTTYException:
pass
if overwrite:
existing[key].remove(j)
else:
msg = 'A different object named {} already exists in {} in your kubeconfig file.'
raise CLIError(msg.format(i['name'], key))
existing[key].append(i)
def load_kubernetes_configuration(filename):
try:
with open(filename) as stream:
return yaml.safe_load(stream)
except (IOError, OSError) as ex:
if getattr(ex, 'errno', 0) == errno.ENOENT:
raise CLIError('{} does not exist'.format(filename))
raise
except (yaml.parser.ParserError, UnicodeDecodeError) as ex:
raise CLIError('Error parsing {} ({})'.format(filename, str(ex)))
def merge_kubernetes_configurations(existing_file, addition_file, replace, context_name=None):
existing = load_kubernetes_configuration(existing_file)
addition = load_kubernetes_configuration(addition_file)
    if addition is None:
        raise CLIError('failed to load additional configuration from {}'.format(addition_file))
    if context_name is not None:
        addition['contexts'][0]['name'] = context_name
        addition['contexts'][0]['context']['cluster'] = context_name
        addition['clusters'][0]['name'] = context_name
        addition['current-context'] = context_name
    # rename the admin context so it doesn't overwrite the user context
    for ctx in addition.get('contexts', []):
        try:
            if ctx['context']['user'].startswith('clusterAdmin'):
                admin_name = ctx['name'] + '-admin'
                addition['current-context'] = ctx['name'] = admin_name
                break
        except (KeyError, TypeError):
            continue
if existing is None:
existing = addition
else:
_handle_merge(existing, addition, 'clusters', replace)
_handle_merge(existing, addition, 'users', replace)
_handle_merge(existing, addition, 'contexts', replace)
existing['current-context'] = addition['current-context']
# check that ~/.kube/config is only read- and writable by its owner
if platform.system() != 'Windows':
existing_file_perms = "{:o}".format(stat.S_IMODE(os.lstat(existing_file).st_mode))
if not existing_file_perms.endswith('600'):
logger.warning('%s has permissions "%s".\nIt should be readable and writable only by its owner.',
existing_file, existing_file_perms)
with open(existing_file, 'w+') as stream:
yaml.safe_dump(existing, stream, default_flow_style=False)
current_context = addition.get('current-context', 'UNKNOWN')
msg = 'Merged "{}" as current context in {}'.format(current_context, existing_file)
print(msg)
def _get_host_name(acs_info):
"""
Gets the FQDN from the acs_info object.
:param acs_info: ContainerService object from Azure REST API
:type acs_info: ContainerService
"""
if acs_info is None:
raise CLIError('Missing acs_info')
if acs_info.master_profile is None:
raise CLIError('Missing master_profile')
if acs_info.master_profile.fqdn is None:
raise CLIError('Missing fqdn')
return acs_info.master_profile.fqdn
def _get_username(acs_info):
"""
Gets the admin user name from the Linux profile of the ContainerService object.
:param acs_info: ContainerService object from Azure REST API
:type acs_info: ContainerService
"""
if acs_info.linux_profile is not None:
return acs_info.linux_profile.admin_username
return None
def _get_acs_info(cli_ctx, name, resource_group_name):
"""
Gets the ContainerService object from Azure REST API.
:param name: ACS resource name
:type name: String
:param resource_group_name: Resource group name
:type resource_group_name: String
"""
container_services = cf_container_services(cli_ctx, None)
return container_services.get(resource_group_name, name)
def _rand_str(n):
"""
Gets a random string
"""
choices = string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(choices) for _ in range(n))
def _mkdir_p(path):
# http://stackoverflow.com/a/600612
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
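# Note: on Python 3 the helper above is equivalent to os.makedirs(path, exist_ok=True).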
def update_acs(cmd, client, resource_group_name, container_service_name, new_agent_count):
instance = client.get(resource_group_name, container_service_name)
instance.agent_pool_profiles[0].count = new_agent_count # pylint: disable=no-member
# null out the service principal because otherwise validation complains
if instance.orchestrator_profile.orchestrator_type == ContainerServiceOrchestratorTypes.kubernetes:
instance.service_principal_profile = None
# null out the windows profile so that validation doesn't complain about not having the admin password
instance.windows_profile = None
return client.create_or_update(resource_group_name, container_service_name, instance)
def list_container_services(cmd, client, resource_group_name=None):
''' List Container Services. '''
svc_list = client.list_by_resource_group(resource_group_name=resource_group_name) \
if resource_group_name else client.list()
return list(svc_list)
def show_service_principal(client, identifier):
object_id = _resolve_service_principal(client, identifier)
return client.get(object_id)
def _resolve_service_principal(client, identifier):
# todo: confirm with graph team that a service principal name must be unique
result = list(client.list(filter="servicePrincipalNames/any(c:c eq '{}')".format(identifier)))
if result:
return result[0].object_id
try:
uuid.UUID(identifier)
return identifier # assume an object id
except ValueError:
raise CLIError("service principal '{}' doesn't exist".format(identifier))
def create_application(client, display_name, homepage, identifier_uris,
available_to_other_tenants=False, password=None, reply_urls=None,
key_value=None, key_type=None, key_usage=None, start_date=None,
end_date=None, required_resource_accesses=None):
from azure.graphrbac.models import GraphErrorException
password_creds, key_creds = _build_application_creds(password, key_value, key_type,
key_usage, start_date, end_date)
app_create_param = ApplicationCreateParameters(available_to_other_tenants=available_to_other_tenants,
display_name=display_name,
identifier_uris=identifier_uris,
homepage=homepage,
reply_urls=reply_urls,
key_credentials=key_creds,
password_credentials=password_creds,
required_resource_access=required_resource_accesses)
try:
result = client.create(app_create_param, raw=True)
return result.output, result.response.headers["ocp-aad-session-key"]
except GraphErrorException as ex:
if 'insufficient privileges' in str(ex).lower():
link = 'https://docs.microsoft.com/azure/azure-resource-manager/resource-group-create-service-principal-portal' # pylint: disable=line-too-long
raise CLIError("Directory permission is needed for the current user to register the application. "
"For how to configure, please refer '{}'. Original error: {}".format(link, ex))
raise
def update_application(client, object_id, display_name, homepage, identifier_uris,
available_to_other_tenants=False, password=None, reply_urls=None,
key_value=None, key_type=None, key_usage=None, start_date=None,
end_date=None, required_resource_accesses=None):
from azure.graphrbac.models import GraphErrorException
password_creds, key_creds = _build_application_creds(password, key_value, key_type,
key_usage, start_date, end_date)
try:
if key_creds:
client.update_key_credentials(object_id, key_creds)
if password_creds:
client.update_password_credentials(object_id, password_creds)
if reply_urls:
client.patch(object_id, ApplicationUpdateParameters(reply_urls=reply_urls))
return
except GraphErrorException as ex:
if 'insufficient privileges' in str(ex).lower():
link = 'https://docs.microsoft.com/azure/azure-resource-manager/resource-group-create-service-principal-portal' # pylint: disable=line-too-long
raise CLIError("Directory permission is needed for the current user to register the application. "
"For how to configure, please refer '{}'. Original error: {}".format(link, ex))
raise
def _build_application_creds(password=None, key_value=None, key_type=None,
key_usage=None, start_date=None, end_date=None):
if password and key_value:
raise CLIError('specify either --password or --key-value, but not both.')
if not start_date:
start_date = datetime.datetime.utcnow()
elif isinstance(start_date, str):
start_date = dateutil.parser.parse(start_date)
if not end_date:
end_date = start_date + relativedelta(years=1)
elif isinstance(end_date, str):
end_date = dateutil.parser.parse(end_date)
key_type = key_type or 'AsymmetricX509Cert'
key_usage = key_usage or 'Verify'
password_creds = None
key_creds = None
if password:
password_creds = [PasswordCredential(start_date=start_date, end_date=end_date,
key_id=str(uuid.uuid4()), value=password)]
elif key_value:
key_creds = [KeyCredential(start_date=start_date, end_date=end_date, value=key_value,
key_id=str(uuid.uuid4()), usage=key_usage, type=key_type)]
return (password_creds, key_creds)
def create_service_principal(cli_ctx, identifier, resolve_app=True, rbac_client=None):
if rbac_client is None:
rbac_client = get_graph_rbac_management_client(cli_ctx)
if resolve_app:
try:
uuid.UUID(identifier)
result = list(rbac_client.applications.list(filter="appId eq '{}'".format(identifier)))
except ValueError:
result = list(rbac_client.applications.list(
filter="identifierUris/any(s:s eq '{}')".format(identifier)))
if not result: # assume we get an object id
result = [rbac_client.applications.get(identifier)]
app_id = result[0].app_id
else:
app_id = identifier
return rbac_client.service_principals.create(ServicePrincipalCreateParameters(app_id=app_id, account_enabled=True))
def create_role_assignment(cli_ctx, role, assignee, is_service_principal, resource_group_name=None, scope=None):
return _create_role_assignment(cli_ctx,
role, assignee, resource_group_name,
scope, resolve_assignee=is_service_principal)
def _create_role_assignment(cli_ctx, role, assignee,
resource_group_name=None, scope=None, resolve_assignee=True):
from azure.cli.core.profiles import ResourceType, get_sdk
factory = get_auth_management_client(cli_ctx, scope)
assignments_client = factory.role_assignments
definitions_client = factory.role_definitions
scope = _build_role_scope(resource_group_name, scope, assignments_client.config.subscription_id)
role_id = _resolve_role_id(role, scope, definitions_client)
# If the cluster has service principal resolve the service principal client id to get the object id,
# if not use MSI object id.
object_id = _resolve_object_id(cli_ctx, assignee) if resolve_assignee else assignee
RoleAssignmentCreateParameters = get_sdk(cli_ctx, ResourceType.MGMT_AUTHORIZATION,
'RoleAssignmentCreateParameters', mod='models',
operation_group='role_assignments')
parameters = RoleAssignmentCreateParameters(role_definition_id=role_id, principal_id=object_id)
assignment_name = uuid.uuid4()
custom_headers = None
return assignments_client.create(scope, assignment_name, parameters, custom_headers=custom_headers)
def _build_role_scope(resource_group_name, scope, subscription_id):
subscription_scope = '/subscriptions/' + subscription_id
if scope:
if resource_group_name:
err = 'Resource group "{}" is redundant because scope is supplied'
raise CLIError(err.format(resource_group_name))
elif resource_group_name:
scope = subscription_scope + '/resourceGroups/' + resource_group_name
else:
scope = subscription_scope
return scope
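# Example (illustrative, placeholder subscription id):
# _build_role_scope('my-rg', None, '00000000-0000-0000-0000-000000000000') returns
# '/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/my-rg'; with no
# resource group it returns just the subscription scope.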
def _resolve_role_id(role, scope, definitions_client):
role_id = None
try:
uuid.UUID(role)
role_id = role
except ValueError:
pass
if not role_id: # retrieve role id
role_defs = list(definitions_client.list(scope, "roleName eq '{}'".format(role)))
if not role_defs:
raise CLIError("Role '{}' doesn't exist.".format(role))
if len(role_defs) > 1:
ids = [r.id for r in role_defs]
err = "More than one role matches the given name '{}'. Please pick a value from '{}'"
raise CLIError(err.format(role, ids))
role_id = role_defs[0].id
return role_id
def _resolve_object_id(cli_ctx, assignee):
client = get_graph_rbac_management_client(cli_ctx)
result = None
if assignee.find('@') >= 0: # looks like a user principal name
result = list(client.users.list(filter="userPrincipalName eq '{}'".format(assignee)))
if not result:
result = list(client.service_principals.list(
filter="servicePrincipalNames/any(c:c eq '{}')".format(assignee)))
if not result: # assume an object id, let us verify it
result = _get_object_stubs(client, [assignee])
# 2+ matches should never happen, so we only check 'no match' here
if not result:
raise CLIError("No matches in graph database for '{}'".format(assignee))
return result[0].object_id
def _get_object_stubs(graph_client, assignees):
params = GetObjectsParameters(include_directory_object_references=True,
object_ids=assignees)
return list(graph_client.objects.get_objects_by_object_ids(params))
def _update_dict(dict1, dict2):
cp = dict1.copy()
cp.update(dict2)
return cp
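# Example (illustrative): _update_dict({'a': 1}, {'a': 2, 'b': 3}) returns {'a': 2, 'b': 3}
# without mutating either input.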
def subnet_role_assignment_exists(cli_ctx, scope):
network_contributor_role_id = "4d97b98b-1d4f-4787-a291-c67834d212e7"
factory = get_auth_management_client(cli_ctx, scope)
assignments_client = factory.role_assignments
for i in assignments_client.list_for_scope(scope=scope, filter='atScope()'):
if i.scope == scope and i.role_definition_id.endswith(network_contributor_role_id):
return True
return False
def aks_check_acr(cmd, client, resource_group_name, name, acr):
if not which("kubectl"):
raise ValidationError("Can not find kubectl executable in PATH")
_, browse_path = tempfile.mkstemp()
aks_get_credentials(
cmd, client, resource_group_name, name, admin=False, path=browse_path
)
# Get kubectl minor version
kubectl_minor_version = -1
try:
cmd = f"kubectl version -o json --kubeconfig {browse_path}"
output = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
jsonS, _ = output.communicate()
kubectl_version = json.loads(jsonS)
kubectl_minor_version = int(kubectl_version["clientVersion"]["minor"])
if int(kubectl_version["serverVersion"]["minor"]) < 17:
            logger.warning('There is a known issue for Kubernetes versions < 1.17 when connecting to '
                           'ACR using MSI. See https://github.com/kubernetes/kubernetes/pull/96355 for '
                           'more information.')
except subprocess.CalledProcessError as err:
raise ValidationError("Could not find kubectl minor version: {}".format(err))
if kubectl_minor_version == -1:
raise ValidationError("Failed to get kubectl version")
podName = "canipull-" + str(uuid.uuid4())
overrides = {
"spec": {
"restartPolicy": "Never",
"hostNetwork": True,
"containers": [
{
"securityContext": {"runAsUser": 0},
"name": podName,
"image": CONST_CANIPULL_IMAGE,
"args": ["-v6", acr],
"stdin": True,
"stdinOnce": True,
"tty": True,
"volumeMounts": [
{"name": "azurejson", "mountPath": "/etc/kubernetes"},
{"name": "sslcerts", "mountPath": "/etc/ssl/certs"},
],
}
],
"tolerations": [
{"key": "CriticalAddonsOnly", "operator": "Exists"},
{"effect": "NoExecute", "operator": "Exists"},
],
"volumes": [
{"name": "azurejson", "hostPath": {"path": "/etc/kubernetes"}},
{"name": "sslcerts", "hostPath": {"path": "/etc/ssl/certs"}},
],
}
}
try:
cmd = [
"kubectl",
"run",
"--kubeconfig",
browse_path,
"--rm",
"--quiet",
"--image",
CONST_CANIPULL_IMAGE,
"--overrides",
json.dumps(overrides),
"-it",
podName,
]
        # Support kubectl versions < 1.18
if kubectl_minor_version < 18:
cmd += ["--generator=run-pod/v1"]
output = subprocess.check_output(
cmd,
universal_newlines=True,
)
except subprocess.CalledProcessError as err:
raise CLIError("Failed to check the ACR: {}".format(err))
if output:
print(output)
else:
raise CLIError("Failed to check the ACR.")
# pylint: disable=too-many-statements,too-many-branches
def aks_browse(cmd, client, resource_group_name, name, disable_browser=False,
listen_address='127.0.0.1', listen_port='8001'):
# verify the kube-dashboard addon was not disabled
instance = client.get(resource_group_name, name)
addon_profiles = instance.addon_profiles or {}
# addon name is case insensitive
addon_profile = next((addon_profiles[k] for k in addon_profiles
if k.lower() == CONST_KUBE_DASHBOARD_ADDON_NAME.lower()),
ManagedClusterAddonProfile(enabled=False))
# open portal view if addon is not enabled or k8s version >= 1.19.0
if StrictVersion(instance.kubernetes_version) >= StrictVersion('1.19.0') or (not addon_profile.enabled):
subscription_id = get_subscription_id(cmd.cli_ctx)
dashboardURL = (
cmd.cli_ctx.cloud.endpoints.portal + # Azure Portal URL (https://portal.azure.com for public cloud)
('/#resource/subscriptions/{0}/resourceGroups/{1}/providers/Microsoft.ContainerService'
'/managedClusters/{2}/workloads').format(subscription_id, resource_group_name, name)
)
if in_cloud_console():
logger.warning('To view the Kubernetes resources view, please open %s in a new tab', dashboardURL)
else:
logger.warning('Kubernetes resources view on %s', dashboardURL)
if not disable_browser:
webbrowser.open_new_tab(dashboardURL)
return
# otherwise open the kube-dashboard addon
if not which('kubectl'):
raise CLIError('Can not find kubectl executable in PATH')
_, browse_path = tempfile.mkstemp()
aks_get_credentials(cmd, client, resource_group_name, name, admin=False, path=browse_path)
# find the dashboard pod's name
try:
dashboard_pod = subprocess.check_output(
["kubectl", "get", "pods", "--kubeconfig", browse_path, "--namespace", "kube-system",
"--output", "name", "--selector", "k8s-app=kubernetes-dashboard"],
universal_newlines=True)
except subprocess.CalledProcessError as err:
raise CLIError('Could not find dashboard pod: {}'.format(err))
if dashboard_pod:
# remove any "pods/" or "pod/" prefix from the name
dashboard_pod = str(dashboard_pod).split('/')[-1].strip()
else:
raise CLIError("Couldn't find the Kubernetes dashboard pod.")
# find the port
try:
dashboard_port = subprocess.check_output(
["kubectl", "get", "pods", "--kubeconfig", browse_path, "--namespace", "kube-system",
"--selector", "k8s-app=kubernetes-dashboard",
"--output", "jsonpath='{.items[0].spec.containers[0].ports[0].containerPort}'"]
)
# output format: b"'{port}'"
dashboard_port = int((dashboard_port.decode('utf-8').replace("'", "")))
except subprocess.CalledProcessError as err:
raise CLIError('Could not find dashboard port: {}'.format(err))
# use https if dashboard container is using https
if dashboard_port == 8443:
protocol = 'https'
else:
protocol = 'http'
proxy_url = 'http://{0}:{1}/'.format(listen_address, listen_port)
dashboardURL = '{0}/api/v1/namespaces/kube-system/services/{1}:kubernetes-dashboard:/proxy/'.format(proxy_url,
protocol)
# launch kubectl port-forward locally to access the remote dashboard
if in_cloud_console():
# TODO: better error handling here.
response = requests.post('http://localhost:8888/openport/{0}'.format(listen_port))
result = json.loads(response.text)
dashboardURL = '{0}api/v1/namespaces/kube-system/services/{1}:kubernetes-dashboard:/proxy/'.format(
result['url'], protocol)
term_id = os.environ.get('ACC_TERM_ID')
if term_id:
response = requests.post('http://localhost:8888/openLink/{0}'.format(term_id),
json={"url": dashboardURL})
logger.warning('To view the console, please open %s in a new tab', dashboardURL)
else:
logger.warning('Proxy running on %s', proxy_url)
logger.warning('Press CTRL+C to close the tunnel...')
if not disable_browser:
wait_then_open_async(dashboardURL)
try:
try:
subprocess.check_output(["kubectl", "--kubeconfig", browse_path, "proxy", "--address",
listen_address, "--port", listen_port], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as err:
            if b'unknown flag: --address' in err.output:
if listen_address != '127.0.0.1':
logger.warning('"--address" is only supported in kubectl v1.13 and later.')
logger.warning('The "--listen-address" argument will be ignored.')
subprocess.call(["kubectl", "--kubeconfig", browse_path, "proxy", "--port", listen_port])
except KeyboardInterrupt:
# Let command processing finish gracefully after the user presses [Ctrl+C]
pass
finally:
if in_cloud_console():
            requests.post('http://localhost:8888/closeport/{0}'.format(listen_port))
def _trim_nodepoolname(nodepool_name):
if not nodepool_name:
return "nodepool1"
return nodepool_name[:12]
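# Example (illustrative): _trim_nodepoolname('') returns 'nodepool1', while
# _trim_nodepoolname('averylongnodepoolname') is truncated to 'averylongnod' (12 chars).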
def _validate_ssh_key(no_ssh_key, ssh_key_value):
if not no_ssh_key:
try:
if not ssh_key_value or not is_valid_ssh_rsa_public_key(ssh_key_value):
raise ValueError()
except (TypeError, ValueError):
shortened_key = truncate_text(ssh_key_value)
raise CLIError('Provided ssh key ({}) is invalid or non-existent'.format(shortened_key))
def _add_monitoring_role_assignment(result, cluster_resource_id, cmd):
service_principal_msi_id = None
# Check if service principal exists, if it does, assign permissions to service principal
# Else, provide permissions to MSI
if (
hasattr(result, 'service_principal_profile') and
hasattr(result.service_principal_profile, 'client_id') and
result.service_principal_profile.client_id.lower() != 'msi'
):
logger.info('valid service principal exists, using it')
service_principal_msi_id = result.service_principal_profile.client_id
is_service_principal = True
elif (
(hasattr(result, 'addon_profiles')) and
(CONST_MONITORING_ADDON_NAME in result.addon_profiles) and
(hasattr(result.addon_profiles[CONST_MONITORING_ADDON_NAME], 'identity')) and
(hasattr(result.addon_profiles[CONST_MONITORING_ADDON_NAME].identity, 'object_id'))
):
logger.info('omsagent MSI exists, using it')
service_principal_msi_id = result.addon_profiles[CONST_MONITORING_ADDON_NAME].identity.object_id
is_service_principal = False
if service_principal_msi_id is not None:
if not _add_role_assignment(cmd.cli_ctx, 'Monitoring Metrics Publisher',
service_principal_msi_id, is_service_principal, scope=cluster_resource_id):
logger.warning('Could not create a role assignment for Monitoring addon. '
'Are you an Owner on this subscription?')
else:
        logger.warning('Could not find service principal or user assigned MSI for role '
                       'assignment')
def _add_ingress_appgw_addon_role_assignment(result, cmd):
service_principal_msi_id = None
# Check if service principal exists, if it does, assign permissions to service principal
# Else, provide permissions to MSI
if (
hasattr(result, 'service_principal_profile') and
hasattr(result.service_principal_profile, 'client_id') and
result.service_principal_profile.client_id != 'msi'
):
service_principal_msi_id = result.service_principal_profile.client_id
is_service_principal = True
elif (
(hasattr(result, 'addon_profiles')) and
(CONST_INGRESS_APPGW_ADDON_NAME in result.addon_profiles) and
(hasattr(result.addon_profiles[CONST_INGRESS_APPGW_ADDON_NAME], 'identity')) and
(hasattr(result.addon_profiles[CONST_INGRESS_APPGW_ADDON_NAME].identity, 'object_id'))
):
service_principal_msi_id = result.addon_profiles[CONST_INGRESS_APPGW_ADDON_NAME].identity.object_id
is_service_principal = False
if service_principal_msi_id is not None:
config = result.addon_profiles[CONST_INGRESS_APPGW_ADDON_NAME].config
from msrestazure.tools import parse_resource_id, resource_id
if CONST_INGRESS_APPGW_APPLICATION_GATEWAY_ID in config:
appgw_id = config[CONST_INGRESS_APPGW_APPLICATION_GATEWAY_ID]
parsed_appgw_id = parse_resource_id(appgw_id)
appgw_group_id = resource_id(subscription=parsed_appgw_id["subscription"],
resource_group=parsed_appgw_id["resource_group"])
if not _add_role_assignment(cmd.cli_ctx, 'Contributor',
service_principal_msi_id, is_service_principal, scope=appgw_group_id):
logger.warning('Could not create a role assignment for application gateway: %s '
'specified in %s addon. '
'Are you an Owner on this subscription?', appgw_id, CONST_INGRESS_APPGW_ADDON_NAME)
if CONST_INGRESS_APPGW_SUBNET_ID in config:
subnet_id = config[CONST_INGRESS_APPGW_SUBNET_ID]
if not _add_role_assignment(cmd.cli_ctx, 'Network Contributor',
service_principal_msi_id, is_service_principal, scope=subnet_id):
logger.warning('Could not create a role assignment for subnet: %s '
'specified in %s addon. '
'Are you an Owner on this subscription?', subnet_id, CONST_INGRESS_APPGW_ADDON_NAME)
if CONST_INGRESS_APPGW_SUBNET_CIDR in config:
if result.agent_pool_profiles[0].vnet_subnet_id is not None:
parsed_subnet_vnet_id = parse_resource_id(result.agent_pool_profiles[0].vnet_subnet_id)
vnet_id = resource_id(subscription=parsed_subnet_vnet_id["subscription"],
resource_group=parsed_subnet_vnet_id["resource_group"],
namespace="Microsoft.Network",
type="virtualNetworks",
name=parsed_subnet_vnet_id["name"])
if not _add_role_assignment(cmd.cli_ctx, 'Contributor',
service_principal_msi_id, is_service_principal, scope=vnet_id):
logger.warning('Could not create a role assignment for virtual network: %s '
'specified in %s addon. '
'Are you an Owner on this subscription?', vnet_id, CONST_INGRESS_APPGW_ADDON_NAME)
def _add_virtual_node_role_assignment(cmd, result, vnet_subnet_id):
# Remove trailing "/subnets/<SUBNET_NAME>" to get the vnet id
vnet_id = vnet_subnet_id.rpartition('/')[0]
vnet_id = vnet_id.rpartition('/')[0]
service_principal_msi_id = None
is_service_principal = False
os_type = 'Linux'
addon_name = CONST_VIRTUAL_NODE_ADDON_NAME + os_type
# Check if service principal exists, if it does, assign permissions to service principal
# Else, provide permissions to MSI
if (
hasattr(result, 'service_principal_profile') and
hasattr(result.service_principal_profile, 'client_id') and
result.service_principal_profile.client_id.lower() != 'msi'
):
logger.info('valid service principal exists, using it')
service_principal_msi_id = result.service_principal_profile.client_id
is_service_principal = True
elif (
(hasattr(result, 'addon_profiles')) and
(addon_name in result.addon_profiles) and
(hasattr(result.addon_profiles[addon_name], 'identity')) and
(hasattr(result.addon_profiles[addon_name].identity, 'object_id'))
):
logger.info('virtual node MSI exists, using it')
service_principal_msi_id = result.addon_profiles[addon_name].identity.object_id
is_service_principal = False
if service_principal_msi_id is not None:
if not _add_role_assignment(cmd.cli_ctx, 'Contributor',
service_principal_msi_id, is_service_principal, scope=vnet_id):
logger.warning('Could not create a role assignment for virtual node addon. '
'Are you an Owner on this subscription?')
else:
        logger.warning('Could not find service principal or user assigned MSI for role '
                       'assignment')
# pylint: disable=too-many-statements,too-many-branches
def aks_create(cmd, client, resource_group_name, name, ssh_key_value, # pylint: disable=too-many-locals
dns_name_prefix=None,
location=None,
admin_username="azureuser",
windows_admin_username=None,
windows_admin_password=None,
enable_ahub=False,
kubernetes_version='',
node_vm_size="Standard_DS2_v2",
node_osdisk_type=None,
node_osdisk_size=0,
node_osdisk_diskencryptionset_id=None,
node_count=3,
nodepool_name="nodepool1",
nodepool_tags=None,
nodepool_labels=None,
service_principal=None, client_secret=None,
no_ssh_key=False,
disable_rbac=None,
enable_rbac=None,
vm_set_type=None,
skip_subnet_role_assignment=False,
enable_cluster_autoscaler=False,
cluster_autoscaler_profile=None,
network_plugin=None,
network_policy=None,
uptime_sla=False,
pod_cidr=None,
service_cidr=None,
dns_service_ip=None,
docker_bridge_address=None,
load_balancer_sku=None,
load_balancer_managed_outbound_ip_count=None,
load_balancer_outbound_ips=None,
load_balancer_outbound_ip_prefixes=None,
load_balancer_outbound_ports=None,
load_balancer_idle_timeout=None,
outbound_type=None,
enable_addons=None,
workspace_resource_id=None,
vnet_subnet_id=None,
ppg=None,
max_pods=0,
min_count=None,
max_count=None,
aad_client_app_id=None,
aad_server_app_id=None,
aad_server_app_secret=None,
aad_tenant_id=None,
tags=None,
zones=None,
enable_node_public_ip=False,
generate_ssh_keys=False, # pylint: disable=unused-argument
api_server_authorized_ip_ranges=None,
enable_private_cluster=False,
enable_managed_identity=True,
assign_identity=None,
attach_acr=None,
enable_aad=False,
aad_admin_group_object_ids=None,
aci_subnet_name=None,
appgw_name=None,
appgw_subnet_cidr=None,
appgw_id=None,
appgw_subnet_id=None,
appgw_watch_namespace=None,
no_wait=False,
yes=False):
_validate_ssh_key(no_ssh_key, ssh_key_value)
subscription_id = get_subscription_id(cmd.cli_ctx)
if not dns_name_prefix:
dns_name_prefix = _get_default_dns_prefix(name, resource_group_name, subscription_id)
rg_location = _get_rg_location(cmd.cli_ctx, resource_group_name)
if location is None:
location = rg_location
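    # Resolve the VM set type and load balancer SKU defaults from the Kubernetes version.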
vm_set_type = _set_vm_set_type(vm_set_type, kubernetes_version)
load_balancer_sku = set_load_balancer_sku(load_balancer_sku, kubernetes_version)
if api_server_authorized_ip_ranges and load_balancer_sku == "basic":
raise CLIError('--api-server-authorized-ip-ranges can only be used with standard load balancer')
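    # Build the initial system-mode agent pool profile from the node options.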
agent_pool_profile = ManagedClusterAgentPoolProfile(
name=_trim_nodepoolname(nodepool_name), # Must be 12 chars or less before ACS RP adds to it
tags=nodepool_tags,
node_labels=nodepool_labels,
count=int(node_count),
vm_size=node_vm_size,
os_type="Linux",
vnet_subnet_id=vnet_subnet_id,
proximity_placement_group_id=ppg,
availability_zones=zones,
enable_node_public_ip=enable_node_public_ip,
max_pods=int(max_pods) if max_pods else None,
type=vm_set_type,
mode="System"
)
if node_osdisk_size:
agent_pool_profile.os_disk_size_gb = int(node_osdisk_size)
if node_osdisk_type:
agent_pool_profile.os_disk_type = node_osdisk_type
_check_cluster_autoscaler_flag(enable_cluster_autoscaler, min_count, max_count, node_count, agent_pool_profile)
linux_profile = None
# LinuxProfile is just used for SSH access to VMs, so omit it if --no-ssh-key was specified.
if not no_ssh_key:
ssh_config = ContainerServiceSshConfiguration(
public_keys=[ContainerServiceSshPublicKey(key_data=ssh_key_value)])
linux_profile = ContainerServiceLinuxProfile(admin_username=admin_username, ssh=ssh_config)
windows_profile = None
if windows_admin_username or windows_admin_password:
# To avoid that windows_admin_password is set but windows_admin_username is not
if windows_admin_username is None:
try:
from knack.prompting import prompt
windows_admin_username = prompt('windows_admin_username: ')
                # Even if the user enters an empty value here, the admin_username validation in
                # ManagedClusterWindowsProfile will reject it.
except NoTTYException:
raise CLIError('Please specify username for Windows in non-interactive mode.')
if windows_admin_password is None:
try:
windows_admin_password = prompt_pass(
msg='windows-admin-password: ', confirm=True)
except NoTTYException:
raise CLIError(
'Please specify both username and password in non-interactive mode.')
windows_license_type = None
if enable_ahub:
windows_license_type = 'Windows_Server'
windows_profile = ManagedClusterWindowsProfile(
admin_username=windows_admin_username,
admin_password=windows_admin_password,
license_type=windows_license_type)
# If customer explicitly provide a service principal, disable managed identity.
if service_principal and client_secret:
enable_managed_identity = False
# Skip create service principal profile for the cluster if the cluster
# enables managed identity and customer doesn't explicitly provide a service principal.
service_principal_profile = None
principal_obj = None
    if not (enable_managed_identity and not service_principal and not client_secret):
principal_obj = _ensure_aks_service_principal(cmd.cli_ctx,
service_principal=service_principal, client_secret=client_secret,
subscription_id=subscription_id, dns_name_prefix=dns_name_prefix,
location=location, name=name)
service_principal_profile = ManagedClusterServicePrincipalProfile(
client_id=principal_obj.get("service_principal"),
secret=principal_obj.get("client_secret"),
key_vault_secret_ref=None)
need_post_creation_vnet_permission_granting = False
if (vnet_subnet_id and not skip_subnet_role_assignment and
not subnet_role_assignment_exists(cmd.cli_ctx, vnet_subnet_id)):
# if service_principal_profile is None, then this cluster is an MSI cluster,
# and the service principal does not exist. Two cases:
# 1. For system assigned identity, we just tell user to grant the
# permission after the cluster is created to keep consistent with portal experience.
# 2. For user assigned identity, we can grant needed permission to
# user provided user assigned identity before creating managed cluster.
if service_principal_profile is None and not assign_identity:
msg = ('It is highly recommended to use USER assigned identity '
                   '(option --assign-identity) when you want to bring your own '
                   'subnet, which will have no latency for the role assignment to '
'take effect. When using SYSTEM assigned identity, '
'azure-cli will grant Network Contributor role to the '
'system assigned identity after the cluster is created, and '
'the role assignment will take some time to take effect, see '
'https://docs.microsoft.com/en-us/azure/aks/use-managed-identity, '
'proceed to create cluster with system assigned identity?')
if not yes and not prompt_y_n(msg, default="n"):
return None
need_post_creation_vnet_permission_granting = True
else:
scope = vnet_subnet_id
identity_client_id = ""
if assign_identity:
identity_client_id = _get_user_assigned_identity_client_id(cmd.cli_ctx, assign_identity)
else:
identity_client_id = service_principal_profile.client_id
if not _add_role_assignment(cmd.cli_ctx, 'Network Contributor',
identity_client_id, scope=scope):
logger.warning('Could not create a role assignment for subnet. '
'Are you an Owner on this subscription?')
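    # Build the load balancer profile from the outbound IP, port and idle timeout options.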
load_balancer_profile = create_load_balancer_profile(
load_balancer_managed_outbound_ip_count,
load_balancer_outbound_ips,
load_balancer_outbound_ip_prefixes,
load_balancer_outbound_ports,
load_balancer_idle_timeout)
if attach_acr:
if enable_managed_identity:
if no_wait:
raise CLIError('When --attach-acr and --enable-managed-identity are both specified, '
'--no-wait is not allowed, please wait until the whole operation succeeds.')
# Attach acr operation will be handled after the cluster is created
else:
_ensure_aks_acr(cmd.cli_ctx,
client_id=service_principal_profile.client_id,
acr_name_or_id=attach_acr,
subscription_id=subscription_id)
outbound_type = _set_outbound_type(outbound_type, vnet_subnet_id, load_balancer_sku, load_balancer_profile)
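    # Build the network profile; a network plugin must be specified explicitly whenever any
    # custom networking option is provided.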
network_profile = None
if any([network_plugin, pod_cidr, service_cidr, dns_service_ip,
docker_bridge_address, network_policy]):
if not network_plugin:
raise CLIError('Please explicitly specify the network plugin type')
if pod_cidr and network_plugin == "azure":
raise CLIError('Please use kubenet as the network plugin type when pod_cidr is specified')
network_profile = ContainerServiceNetworkProfile(
network_plugin=network_plugin,
pod_cidr=pod_cidr,
service_cidr=service_cidr,
dns_service_ip=dns_service_ip,
docker_bridge_cidr=docker_bridge_address,
network_policy=network_policy,
load_balancer_sku=load_balancer_sku.lower(),
load_balancer_profile=load_balancer_profile,
outbound_type=outbound_type
)
else:
if load_balancer_sku.lower() == "standard" or load_balancer_profile:
network_profile = ContainerServiceNetworkProfile(
network_plugin="kubenet",
load_balancer_sku=load_balancer_sku.lower(),
load_balancer_profile=load_balancer_profile,
outbound_type=outbound_type,
)
if load_balancer_sku.lower() == "basic":
network_profile = ContainerServiceNetworkProfile(
load_balancer_sku=load_balancer_sku.lower(),
)
addon_profiles = _handle_addons_args(
cmd,
enable_addons,
subscription_id,
resource_group_name,
{},
workspace_resource_id,
aci_subnet_name,
vnet_subnet_id,
appgw_name,
appgw_subnet_cidr,
appgw_id,
appgw_subnet_id,
appgw_watch_namespace,
)
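    # Record which addons are enabled so their post-creation role assignments can be handled below.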
monitoring = False
if CONST_MONITORING_ADDON_NAME in addon_profiles:
monitoring = True
_ensure_container_insights_for_monitoring(cmd, addon_profiles[CONST_MONITORING_ADDON_NAME])
# addon is in the list and is enabled
ingress_appgw_addon_enabled = CONST_INGRESS_APPGW_ADDON_NAME in addon_profiles and \
addon_profiles[CONST_INGRESS_APPGW_ADDON_NAME].enabled
os_type = 'Linux'
enable_virtual_node = False
if CONST_VIRTUAL_NODE_ADDON_NAME + os_type in addon_profiles:
enable_virtual_node = True
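    # Build the AAD profile: managed AAD with --enable-aad, or legacy AAD when client/server app IDs are given.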
aad_profile = None
if enable_aad:
if any([aad_client_app_id, aad_server_app_id, aad_server_app_secret]):
raise CLIError('"--enable-aad" cannot be used together with '
'"--aad-client-app-id/--aad-server-app-id/--aad-server-app-secret"')
aad_profile = ManagedClusterAADProfile(
managed=True,
admin_group_object_ids=_parse_comma_separated_list(aad_admin_group_object_ids),
tenant_id=aad_tenant_id
)
else:
if any([aad_client_app_id, aad_server_app_id, aad_server_app_secret, aad_tenant_id]):
if aad_tenant_id is None:
profile = Profile(cli_ctx=cmd.cli_ctx)
_, _, aad_tenant_id = profile.get_login_credentials()
aad_profile = ManagedClusterAADProfile(
client_app_id=aad_client_app_id,
server_app_id=aad_server_app_id,
server_app_secret=aad_server_app_secret,
tenant_id=aad_tenant_id
)
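    # Configure API server access: authorized IP ranges and/or private cluster.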
api_server_access_profile = None
if enable_private_cluster and load_balancer_sku.lower() != "standard":
raise CLIError("Please use standard load balancer for private cluster")
if api_server_authorized_ip_ranges or enable_private_cluster:
api_server_access_profile = _populate_api_server_access_profile(
api_server_authorized_ip_ranges,
enable_private_cluster=enable_private_cluster
)
# Check that both --disable-rbac and --enable-rbac weren't provided
if all([disable_rbac, enable_rbac]):
raise CLIError('specify either "--disable-rbac" or "--enable-rbac", not both.')
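    # Configure the cluster identity: system-assigned by default, or user-assigned when --assign-identity is set.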
identity = None
if not enable_managed_identity and assign_identity:
raise ArgumentUsageError('--assign-identity can only be specified when --enable-managed-identity is specified')
if enable_managed_identity and not assign_identity:
identity = ManagedClusterIdentity(
type="SystemAssigned"
)
elif enable_managed_identity and assign_identity:
user_assigned_identity = {
assign_identity: ManagedClusterIdentityUserAssignedIdentitiesValue()
}
identity = ManagedClusterIdentity(
type="UserAssigned",
user_assigned_identities=user_assigned_identity
)
mc = ManagedCluster(
location=location,
tags=tags,
dns_prefix=dns_name_prefix,
kubernetes_version=kubernetes_version,
enable_rbac=not disable_rbac,
agent_pool_profiles=[agent_pool_profile],
linux_profile=linux_profile,
windows_profile=windows_profile,
service_principal_profile=service_principal_profile,
network_profile=network_profile,
addon_profiles=addon_profiles,
aad_profile=aad_profile,
auto_scaler_profile=cluster_autoscaler_profile,
api_server_access_profile=api_server_access_profile,
identity=identity,
disk_encryption_set_id=node_osdisk_diskencryptionset_id
)
if uptime_sla:
mc.sku = ManagedClusterSKU(
name="Basic",
tier="Paid"
)
# Add AAD session key to header.
    # If principal_obj is None, we do not add this header. This can happen when the cluster
    # enables managed identity; in that case the header is not needed.
custom_headers = None
if principal_obj:
custom_headers = {'Ocp-Aad-Session-Key': principal_obj.get("aad_session_key")}
# Due to SPN replication latency, we do a few retries here
max_retry = 30
retry_exception = Exception(None)
for _ in range(0, max_retry):
try:
need_pull_for_result = (monitoring or
(enable_managed_identity and attach_acr) or
ingress_appgw_addon_enabled or
enable_virtual_node or
need_post_creation_vnet_permission_granting)
if need_pull_for_result:
# adding a wait here since we rely on the result for role assignment
result = LongRunningOperation(cmd.cli_ctx)(client.create_or_update(
resource_group_name=resource_group_name,
resource_name=name,
parameters=mc))
else:
result = sdk_no_wait(no_wait,
client.create_or_update,
resource_group_name=resource_group_name,
resource_name=name,
parameters=mc,
custom_headers=custom_headers)
if monitoring:
cloud_name = cmd.cli_ctx.cloud.name
                # Add the cluster SPN/MSI Monitoring Metrics Publisher role assignment to publish metrics to MDM.
                # MDM metrics are supported only in the Azure public cloud, so add the role assignment only there.
if cloud_name.lower() == 'azurecloud':
from msrestazure.tools import resource_id
cluster_resource_id = resource_id(
subscription=subscription_id,
resource_group=resource_group_name,
namespace='Microsoft.ContainerService', type='managedClusters',
name=name
)
_add_monitoring_role_assignment(result, cluster_resource_id, cmd)
if enable_managed_identity and attach_acr:
if result.identity_profile is None or result.identity_profile["kubeletidentity"] is None:
                    logger.warning('Your cluster is successfully created, but we failed to attach ACR to it. '
                                   'You can manually grant permission to the identity named <CLUSTER_NAME>-agentpool '
                                   'in the MC_ resource group to give it permission to pull from ACR.')
else:
kubelet_identity_client_id = result.identity_profile["kubeletidentity"].client_id
_ensure_aks_acr(cmd.cli_ctx,
client_id=kubelet_identity_client_id,
acr_name_or_id=attach_acr,
subscription_id=subscription_id)
if ingress_appgw_addon_enabled:
_add_ingress_appgw_addon_role_assignment(result, cmd)
if enable_virtual_node:
_add_virtual_node_role_assignment(cmd, result, vnet_subnet_id)
if need_post_creation_vnet_permission_granting:
if not _create_role_assignment(cmd.cli_ctx, 'Network Contributor',
result.identity.principal_id, scope=vnet_subnet_id,
resolve_assignee=False):
logger.warning('Could not create a role assignment for subnet. '
'Are you an Owner on this subscription?')
return result
except CloudError as ex:
retry_exception = ex
if 'not found in Active Directory tenant' in ex.message:
time.sleep(3)
else:
raise ex
raise retry_exception
def aks_disable_addons(cmd, client, resource_group_name, name, addons, no_wait=False):
instance = client.get(resource_group_name, name)
subscription_id = get_subscription_id(cmd.cli_ctx)
instance = _update_addons(
cmd,
instance,
subscription_id,
resource_group_name,
name,
addons,
enable=False,
no_wait=no_wait
)
# send the managed cluster representation to update the addon profiles
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, name, instance)
def aks_enable_addons(cmd, client, resource_group_name, name, addons,
workspace_resource_id=None,
subnet_name=None,
appgw_name=None,
appgw_subnet_cidr=None,
appgw_id=None,
appgw_subnet_id=None,
appgw_watch_namespace=None,
no_wait=False):
instance = client.get(resource_group_name, name)
subscription_id = get_subscription_id(cmd.cli_ctx)
instance = _update_addons(cmd, instance, subscription_id, resource_group_name, name, addons, enable=True,
workspace_resource_id=workspace_resource_id,
subnet_name=subnet_name,
appgw_name=appgw_name,
appgw_subnet_cidr=appgw_subnet_cidr,
appgw_id=appgw_id,
appgw_subnet_id=appgw_subnet_id,
appgw_watch_namespace=appgw_watch_namespace,
no_wait=no_wait)
enable_monitoring = CONST_MONITORING_ADDON_NAME in instance.addon_profiles \
and instance.addon_profiles[CONST_MONITORING_ADDON_NAME].enabled
ingress_appgw_addon_enabled = CONST_INGRESS_APPGW_ADDON_NAME in instance.addon_profiles \
and instance.addon_profiles[CONST_INGRESS_APPGW_ADDON_NAME].enabled
os_type = 'Linux'
virtual_node_addon_name = CONST_VIRTUAL_NODE_ADDON_NAME + os_type
enable_virtual_node = (virtual_node_addon_name in instance.addon_profiles and
instance.addon_profiles[virtual_node_addon_name].enabled)
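    # Wait for the operation result when an addon needs a post-creation role assignment.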
need_pull_for_result = enable_monitoring or ingress_appgw_addon_enabled or enable_virtual_node
if need_pull_for_result:
if enable_monitoring:
_ensure_container_insights_for_monitoring(cmd, instance.addon_profiles[CONST_MONITORING_ADDON_NAME])
# adding a wait here since we rely on the result for role assignment
result = LongRunningOperation(cmd.cli_ctx)(client.create_or_update(resource_group_name, name, instance))
if enable_monitoring:
cloud_name = cmd.cli_ctx.cloud.name
            # MDM metrics are supported only in the Azure public cloud, so add the role assignment only there.
if cloud_name.lower() == 'azurecloud':
from msrestazure.tools import resource_id
cluster_resource_id = resource_id(
subscription=subscription_id,
resource_group=resource_group_name,
namespace='Microsoft.ContainerService', type='managedClusters',
name=name
)
_add_monitoring_role_assignment(result, cluster_resource_id, cmd)
if ingress_appgw_addon_enabled:
_add_ingress_appgw_addon_role_assignment(result, cmd)
if enable_virtual_node:
# All agent pool will reside in the same vnet, we will grant vnet level Contributor role
# in later function, so using a random agent pool here is OK
random_agent_pool = result.agent_pool_profiles[0]
if random_agent_pool.vnet_subnet_id != "":
_add_virtual_node_role_assignment(cmd, result, random_agent_pool.vnet_subnet_id)
            # Otherwise, the cluster is not using a custom VNet; the permission is already granted in the AKS RP,
            # so we don't need to handle it on the client side.
else:
result = sdk_no_wait(no_wait, client.create_or_update,
resource_group_name, name, instance)
return result
def aks_get_versions(cmd, client, location):
return client.list_orchestrators(location, resource_type='managedClusters')
def aks_get_credentials(cmd, client, resource_group_name, name, admin=False,
path=os.path.join(os.path.expanduser('~'), '.kube', 'config'),
overwrite_existing=False, context_name=None):
credentialResults = None
if admin:
credentialResults = client.list_cluster_admin_credentials(resource_group_name, name)
else:
credentialResults = client.list_cluster_user_credentials(resource_group_name, name)
if not credentialResults:
raise CLIError("No Kubernetes credentials found.")
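    # Merge the first returned kubeconfig into the local kubeconfig file (or print it).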
try:
kubeconfig = credentialResults.kubeconfigs[0].value.decode(encoding='UTF-8')
_print_or_merge_credentials(path, kubeconfig, overwrite_existing, context_name)
except (IndexError, ValueError):
raise CLIError("Fail to find kubeconfig file.")
def aks_list(cmd, client, resource_group_name=None):
if resource_group_name:
managed_clusters = client.list_by_resource_group(resource_group_name)
else:
managed_clusters = client.list()
return _remove_nulls(list(managed_clusters))
def aks_show(cmd, client, resource_group_name, name):
mc = client.get(resource_group_name, name)
return _remove_nulls([mc])[0]
def aks_update_credentials(cmd, client, resource_group_name, name,
reset_service_principal=False,
reset_aad=False,
service_principal=None,
client_secret=None,
aad_server_app_id=None,
aad_server_app_secret=None,
aad_client_app_id=None,
aad_tenant_id=None,
no_wait=False):
if bool(reset_service_principal) == bool(reset_aad):
raise CLIError('usage error: --reset-service-principal | --reset-aad-profile')
if reset_service_principal:
if service_principal is None or client_secret is None:
raise CLIError('usage error: --reset-service-principal --service-principal ID --client-secret SECRET')
return sdk_no_wait(no_wait,
client.reset_service_principal_profile,
resource_group_name,
name, service_principal, client_secret)
if not all([aad_client_app_id, aad_server_app_id, aad_server_app_secret]):
raise CLIError('usage error: --reset-aad --aad-client-app-id ID --aad-server-app-id ID '
'--aad-server-app-secret SECRET [--aad-tenant-id ID]')
parameters = {
'clientAppID': aad_client_app_id,
'serverAppID': aad_server_app_id,
'serverAppSecret': aad_server_app_secret,
'tenantID': aad_tenant_id
}
return sdk_no_wait(no_wait,
client.reset_aad_profile,
resource_group_name,
name, parameters)
def aks_scale(cmd, client, resource_group_name, name, node_count, nodepool_name="", no_wait=False):
instance = client.get(resource_group_name, name)
if len(instance.agent_pool_profiles) > 1 and nodepool_name == "":
        raise CLIError('There is more than one node pool in the cluster. '
                       'Please specify a node pool name or use the "az aks nodepool" command to scale a node pool.')
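    # Find the matching node pool (or the only pool when no name is given) and update its count.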
for agent_profile in instance.agent_pool_profiles:
if agent_profile.name == nodepool_name or (nodepool_name == "" and len(instance.agent_pool_profiles) == 1):
if agent_profile.enable_auto_scaling:
raise CLIError("Cannot scale cluster autoscaler enabled node pool.")
agent_profile.count = int(node_count) # pylint: disable=no-member
# null out the SP and AAD profile because otherwise validation complains
instance.service_principal_profile = None
instance.aad_profile = None
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, name, instance)
raise CLIError('The nodepool "{}" was not found.'.format(nodepool_name))
# pylint: disable=inconsistent-return-statements
def aks_update(cmd, client, resource_group_name, name,
enable_cluster_autoscaler=False,
disable_cluster_autoscaler=False,
update_cluster_autoscaler=False,
cluster_autoscaler_profile=None,
min_count=None, max_count=None,
uptime_sla=False,
load_balancer_managed_outbound_ip_count=None,
load_balancer_outbound_ips=None,
load_balancer_outbound_ip_prefixes=None,
load_balancer_outbound_ports=None,
load_balancer_idle_timeout=None,
attach_acr=None,
detach_acr=None,
api_server_authorized_ip_ranges=None,
enable_aad=False,
aad_tenant_id=None,
aad_admin_group_object_ids=None,
enable_ahub=False,
disable_ahub=False,
no_wait=False):
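    # Count the mutually exclusive autoscaler flags and detect which update paths were requested.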
update_autoscaler = enable_cluster_autoscaler + disable_cluster_autoscaler + update_cluster_autoscaler
update_lb_profile = is_load_balancer_profile_provided(load_balancer_managed_outbound_ip_count,
load_balancer_outbound_ips,
load_balancer_outbound_ip_prefixes,
load_balancer_outbound_ports,
load_balancer_idle_timeout)
update_aad_profile = not (aad_tenant_id is None and aad_admin_group_object_ids is None)
# pylint: disable=too-many-boolean-expressions
if (update_autoscaler != 1 and cluster_autoscaler_profile is None and
not update_lb_profile and
not attach_acr and
not detach_acr and
not uptime_sla and
api_server_authorized_ip_ranges is None and
not enable_aad and
not update_aad_profile and
not enable_ahub and
not disable_ahub):
raise CLIError('Please specify one or more of "--enable-cluster-autoscaler" or '
'"--disable-cluster-autoscaler" or '
'"--update-cluster-autoscaler" or '
'"--cluster-autoscaler-profile" or '
                       '"--load-balancer-managed-outbound-ip-count" or '
                       '"--load-balancer-outbound-ips" or '
                       '"--load-balancer-outbound-ip-prefixes" or '
                       '"--load-balancer-outbound-ports" or '
                       '"--load-balancer-idle-timeout" or '
                       '"--attach-acr" or "--detach-acr" or '
                       '"--uptime-sla" or '
'"--api-server-authorized-ip-ranges" or '
'"--enable-aad" or '
'"--aad-tenant-id" or '
'"--aad-admin-group-object-ids" or '
'"--enable-ahub" or '
'"--disable-ahub"')
instance = client.get(resource_group_name, name)
# For multi-agent pool, use the az aks nodepool command
if update_autoscaler > 0 and len(instance.agent_pool_profiles) > 1:
        raise CLIError('There is more than one node pool in the cluster. Please use the "az aks nodepool" command '
                       'to update per-node-pool autoscaler settings.')
_validate_autoscaler_update_counts(min_count, max_count, enable_cluster_autoscaler or
update_cluster_autoscaler)
if enable_cluster_autoscaler:
if instance.agent_pool_profiles[0].enable_auto_scaling:
logger.warning('Cluster autoscaler is already enabled for this node pool.\n'
                           'Please run "az aks update --update-cluster-autoscaler" '
'if you want to update min-count or max-count.')
return None
instance.agent_pool_profiles[0].min_count = int(min_count)
instance.agent_pool_profiles[0].max_count = int(max_count)
instance.agent_pool_profiles[0].enable_auto_scaling = True
if update_cluster_autoscaler:
if not instance.agent_pool_profiles[0].enable_auto_scaling:
raise CLIError('Cluster autoscaler is not enabled for this node pool.\n'
'Run "az aks nodepool update --enable-cluster-autoscaler" '
'to enable cluster with min-count and max-count.')
instance.agent_pool_profiles[0].min_count = int(min_count)
instance.agent_pool_profiles[0].max_count = int(max_count)
if disable_cluster_autoscaler:
if not instance.agent_pool_profiles[0].enable_auto_scaling:
logger.warning('Cluster autoscaler is already disabled for this node pool.')
return None
instance.agent_pool_profiles[0].enable_auto_scaling = False
instance.agent_pool_profiles[0].min_count = None
instance.agent_pool_profiles[0].max_count = None
# if intention is to clear autoscaler profile
if cluster_autoscaler_profile == {}:
instance.auto_scaler_profile = {}
# else profile is provided, update instance profile if it exists
elif cluster_autoscaler_profile:
instance.auto_scaler_profile = _update_dict(instance.auto_scaler_profile.__dict__,
dict((key.replace("-", "_"), value)
for (key, value) in cluster_autoscaler_profile.items())) \
if instance.auto_scaler_profile else cluster_autoscaler_profile
subscription_id = get_subscription_id(cmd.cli_ctx)
client_id = ""
if instance.identity is not None and instance.identity.type == "SystemAssigned":
if instance.identity_profile is None or instance.identity_profile["kubeletidentity"] is None:
raise CLIError('Unexpected error getting kubelet\'s identity for the cluster. '
'Please do not set --attach-acr or --detach-acr. '
'You can manually grant or revoke permission to the identity named '
                           '<CLUSTER_NAME>-agentpool in the MC_ resource group to access ACR.')
client_id = instance.identity_profile["kubeletidentity"].client_id
else:
client_id = instance.service_principal_profile.client_id
if not client_id:
raise CLIError('Cannot get the AKS cluster\'s service principal.')
if attach_acr:
_ensure_aks_acr(cmd.cli_ctx,
client_id=client_id,
acr_name_or_id=attach_acr,
subscription_id=subscription_id)
if detach_acr:
_ensure_aks_acr(cmd.cli_ctx,
client_id=client_id,
acr_name_or_id=detach_acr,
subscription_id=subscription_id,
detach=True)
if uptime_sla:
instance.sku = ManagedClusterSKU(
name="Basic",
tier="Paid"
)
if update_lb_profile:
instance.network_profile.load_balancer_profile = update_load_balancer_profile(
load_balancer_managed_outbound_ip_count,
load_balancer_outbound_ips,
load_balancer_outbound_ip_prefixes,
load_balancer_outbound_ports,
load_balancer_idle_timeout,
instance.network_profile.load_balancer_profile)
# empty string is valid as it disables ip whitelisting
if api_server_authorized_ip_ranges is not None:
instance.api_server_access_profile = \
_populate_api_server_access_profile(api_server_authorized_ip_ranges, instance=instance)
if enable_aad:
if instance.aad_profile is not None and instance.aad_profile.managed:
raise CLIError('Cannot specify "--enable-aad" if managed AAD is already enabled')
instance.aad_profile = ManagedClusterAADProfile(
managed=True
)
if update_aad_profile:
if instance.aad_profile is None or not instance.aad_profile.managed:
raise CLIError('Cannot specify "--aad-tenant-id/--aad-admin-group-object-ids"'
' if managed AAD is not enabled')
if aad_tenant_id is not None:
instance.aad_profile.tenant_id = aad_tenant_id
if aad_admin_group_object_ids is not None:
instance.aad_profile.admin_group_object_ids = _parse_comma_separated_list(aad_admin_group_object_ids)
if enable_ahub and disable_ahub:
raise CLIError('Cannot specify "--enable-ahub" and "--disable-ahub" at the same time')
if enable_ahub:
instance.windows_profile.license_type = 'Windows_Server'
if disable_ahub:
instance.windows_profile.license_type = 'None'
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, name, instance)
# pylint: disable=unused-argument,inconsistent-return-statements,too-many-return-statements
def aks_upgrade(cmd,
client,
resource_group_name, name,
kubernetes_version='',
control_plane_only=False,
node_image_only=False,
no_wait=False,
yes=False):
msg = 'Kubernetes may be unavailable during cluster upgrades.\n Are you sure you want to perform this operation?'
if not yes and not prompt_y_n(msg, default="n"):
return None
instance = client.get(resource_group_name, name)
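    # Detect legacy clusters whose agent pools use availability sets (VMAS).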
vmas_cluster = False
for agent_profile in instance.agent_pool_profiles:
if agent_profile.type.lower() == "availabilityset":
vmas_cluster = True
break
if kubernetes_version != '' and node_image_only:
        raise CLIError('Conflicting flags. Upgrading the Kubernetes version will also upgrade the node image version. '
                       'If you only want to upgrade the node image version, use the "--node-image-only" option.')
if node_image_only:
        msg = "This node image upgrade operation will run across every node pool in the cluster " \
              "and might take a while. Do you wish to continue?"
if not yes and not prompt_y_n(msg, default="n"):
return None
        # This is only a client-side convenience so customers can run "az aks upgrade" to upgrade all
        # node pools of a cluster. The SDK only supports upgrading a single node pool at a time.
for agent_pool_profile in instance.agent_pool_profiles:
if vmas_cluster:
                raise CLIError('This cluster is not using VirtualMachineScaleSets. The node-image-only upgrade '
                               'operation can only be applied to VirtualMachineScaleSets clusters.')
_upgrade_single_nodepool_image_version(True, client, resource_group_name, name, agent_pool_profile.name)
mc = client.get(resource_group_name, name)
return _remove_nulls([mc])[0]
if instance.kubernetes_version == kubernetes_version:
if instance.provisioning_state == "Succeeded":
logger.warning("The cluster is already on version %s and is not in a failed state. No operations "
"will occur when upgrading to the same version if the cluster is not in a failed state.",
instance.kubernetes_version)
elif instance.provisioning_state == "Failed":
logger.warning("Cluster currently in failed state. Proceeding with upgrade to existing version %s to "
"attempt resolution of failed cluster state.", instance.kubernetes_version)
upgrade_all = False
instance.kubernetes_version = kubernetes_version
# for legacy clusters, we always upgrade node pools with CCP.
if instance.max_agent_pools < 8 or vmas_cluster:
if control_plane_only:
msg = ("Legacy clusters do not support control plane only upgrade. All node pools will be "
"upgraded to {} as well. Continue?").format(instance.kubernetes_version)
if not yes and not prompt_y_n(msg, default="n"):
return None
upgrade_all = True
else:
if not control_plane_only:
msg = ("Since control-plane-only argument is not specified, this will upgrade the control plane "
"AND all nodepools to version {}. Continue?").format(instance.kubernetes_version)
if not yes and not prompt_y_n(msg, default="n"):
return None
upgrade_all = True
else:
msg = ("Since control-plane-only argument is specified, this will upgrade only the control plane to {}. "
"Node pool will not change. Continue?").format(instance.kubernetes_version)
if not yes and not prompt_y_n(msg, default="n"):
return None
if upgrade_all:
for agent_profile in instance.agent_pool_profiles:
agent_profile.orchestrator_version = kubernetes_version
# null out the SP and AAD profile because otherwise validation complains
instance.service_principal_profile = None
instance.aad_profile = None
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, name, instance)
def _upgrade_single_nodepool_image_version(no_wait, client, resource_group_name, cluster_name, nodepool_name):
return sdk_no_wait(no_wait, client.upgrade_node_image_version, resource_group_name, cluster_name, nodepool_name)
DEV_SPACES_EXTENSION_NAME = 'dev-spaces'
DEV_SPACES_EXTENSION_MODULE = 'azext_dev_spaces.custom'
def aks_use_dev_spaces(cmd, client, name, resource_group_name, update=False, space_name=None,
endpoint_type='Public', prompt=False):
"""
Use Azure Dev Spaces with a managed Kubernetes cluster.
:param name: Name of the managed cluster.
:type name: String
:param resource_group_name: Name of resource group. You can configure the default group. \
Using 'az configure --defaults group=<name>'.
:type resource_group_name: String
:param update: Update to the latest Azure Dev Spaces client components.
:type update: bool
:param space_name: Name of the new or existing dev space to select. Defaults to an \
interactive selection experience.
:type space_name: String
    :param endpoint_type: The endpoint type to be used for an Azure Dev Spaces controller. \
See https://aka.ms/azds-networking for more information.
:type endpoint_type: String
:param prompt: Do not prompt for confirmation. Requires --space.
:type prompt: bool
"""
if _get_or_add_extension(cmd, DEV_SPACES_EXTENSION_NAME, DEV_SPACES_EXTENSION_MODULE, update):
azext_custom = _get_azext_module(DEV_SPACES_EXTENSION_NAME, DEV_SPACES_EXTENSION_MODULE)
try:
azext_custom.ads_use_dev_spaces(name, resource_group_name, update, space_name, endpoint_type, prompt)
except TypeError:
raise CLIError("Use '--update' option to get the latest Azure Dev Spaces client components.")
except AttributeError as ae:
raise CLIError(ae)
def aks_remove_dev_spaces(cmd, client, name, resource_group_name, prompt=False):
"""
Remove Azure Dev Spaces from a managed Kubernetes cluster.
:param name: Name of the managed cluster.
:type name: String
:param resource_group_name: Name of resource group. You can configure the default group. \
Using 'az configure --defaults group=<name>'.
:type resource_group_name: String
:param prompt: Do not prompt for confirmation.
:type prompt: bool
"""
if _get_or_add_extension(cmd, DEV_SPACES_EXTENSION_NAME, DEV_SPACES_EXTENSION_MODULE):
azext_custom = _get_azext_module(DEV_SPACES_EXTENSION_NAME, DEV_SPACES_EXTENSION_MODULE)
try:
azext_custom.ads_remove_dev_spaces(name, resource_group_name, prompt)
except AttributeError as ae:
raise CLIError(ae)
def aks_rotate_certs(cmd, client, resource_group_name, name, no_wait=True):
return sdk_no_wait(no_wait, client.rotate_cluster_certificates, resource_group_name, name)
def _update_addons(cmd, instance, subscription_id, resource_group_name, name, addons, enable,
workspace_resource_id=None,
subnet_name=None,
appgw_name=None,
appgw_subnet_cidr=None,
appgw_id=None,
appgw_subnet_id=None,
appgw_watch_namespace=None,
no_wait=False):
# parse the comma-separated addons argument
addon_args = addons.split(',')
addon_profiles = instance.addon_profiles or {}
os_type = 'Linux'
    # for each addon argument
for addon_arg in addon_args:
if addon_arg not in ADDONS:
raise CLIError("Invalid addon name: {}.".format(addon_arg))
addon = ADDONS[addon_arg]
if addon == CONST_VIRTUAL_NODE_ADDON_NAME:
# only linux is supported for now, in the future this will be a user flag
addon += os_type
# honor addon names defined in Azure CLI
for key in list(addon_profiles):
if key.lower() == addon.lower() and key != addon:
addon_profiles[addon] = addon_profiles.pop(key)
if enable:
# add new addons or update existing ones and enable them
addon_profile = addon_profiles.get(addon, ManagedClusterAddonProfile(enabled=False))
# special config handling for certain addons
if addon == CONST_MONITORING_ADDON_NAME:
if addon_profile.enabled:
raise CLIError('The monitoring addon is already enabled for this managed cluster.\n'
                                   'To change monitoring configuration, run "az aks disable-addons -a monitoring" '
'before enabling it again.')
if not workspace_resource_id:
workspace_resource_id = _ensure_default_log_analytics_workspace_for_monitoring(
cmd,
subscription_id,
resource_group_name)
workspace_resource_id = workspace_resource_id.strip()
if not workspace_resource_id.startswith('/'):
workspace_resource_id = '/' + workspace_resource_id
if workspace_resource_id.endswith('/'):
workspace_resource_id = workspace_resource_id.rstrip('/')
addon_profile.config = {CONST_MONITORING_LOG_ANALYTICS_WORKSPACE_RESOURCE_ID: workspace_resource_id}
elif addon == (CONST_VIRTUAL_NODE_ADDON_NAME + os_type):
if addon_profile.enabled:
raise CLIError('The virtual-node addon is already enabled for this managed cluster.\n'
'To change virtual-node configuration, run '
                                   f'"az aks disable-addons -a virtual-node -g {resource_group_name}" '
'before enabling it again.')
if not subnet_name:
raise CLIError('The aci-connector addon requires setting a subnet name.')
addon_profile.config = {CONST_VIRTUAL_NODE_SUBNET_NAME: subnet_name}
elif addon == CONST_INGRESS_APPGW_ADDON_NAME:
if addon_profile.enabled:
raise CLIError('The ingress-appgw addon is already enabled for this managed cluster.\n'
'To change ingress-appgw configuration, run '
f'"az aks disable-addons -a ingress-appgw -n {name} -g {resource_group_name}" '
'before enabling it again.')
addon_profile = ManagedClusterAddonProfile(enabled=True, config={})
if appgw_name is not None:
addon_profile.config[CONST_INGRESS_APPGW_APPLICATION_GATEWAY_NAME] = appgw_name
if appgw_subnet_cidr is not None:
addon_profile.config[CONST_INGRESS_APPGW_SUBNET_CIDR] = appgw_subnet_cidr
if appgw_id is not None:
addon_profile.config[CONST_INGRESS_APPGW_APPLICATION_GATEWAY_ID] = appgw_id
if appgw_subnet_id is not None:
addon_profile.config[CONST_INGRESS_APPGW_SUBNET_ID] = appgw_subnet_id
if appgw_watch_namespace is not None:
addon_profile.config[CONST_INGRESS_APPGW_WATCH_NAMESPACE] = appgw_watch_namespace
addon_profiles[addon] = addon_profile
else:
if addon not in addon_profiles:
if addon == CONST_KUBE_DASHBOARD_ADDON_NAME:
addon_profiles[addon] = ManagedClusterAddonProfile(enabled=False)
else:
raise CLIError("The addon {} is not installed.".format(addon))
addon_profiles[addon].config = None
addon_profiles[addon].enabled = enable
instance.addon_profiles = addon_profiles
# null out the SP and AAD profile because otherwise validation complains
instance.service_principal_profile = None
instance.aad_profile = None
return instance
def _get_azext_module(extension_name, module_name):
try:
# Adding the installed extension in the path
from azure.cli.core.extension.operations import add_extension_to_path
add_extension_to_path(extension_name)
# Import the extension module
from importlib import import_module
azext_custom = import_module(module_name)
return azext_custom
except ImportError as ie:
raise CLIError(ie)
def _handle_addons_args(cmd, addons_str, subscription_id, resource_group_name, addon_profiles=None,
workspace_resource_id=None,
aci_subnet_name=None,
vnet_subnet_id=None,
appgw_name=None,
appgw_subnet_cidr=None,
appgw_id=None,
appgw_subnet_id=None,
appgw_watch_namespace=None):
if not addon_profiles:
addon_profiles = {}
addons = addons_str.split(',') if addons_str else []
if 'http_application_routing' in addons:
addon_profiles[CONST_HTTP_APPLICATION_ROUTING_ADDON_NAME] = ManagedClusterAddonProfile(enabled=True)
addons.remove('http_application_routing')
if 'kube-dashboard' in addons:
addon_profiles[CONST_KUBE_DASHBOARD_ADDON_NAME] = ManagedClusterAddonProfile(enabled=True)
addons.remove('kube-dashboard')
# TODO: can we help the user find a workspace resource ID?
if 'monitoring' in addons:
if not workspace_resource_id:
# use default workspace if exists else create default workspace
workspace_resource_id = _ensure_default_log_analytics_workspace_for_monitoring(
cmd, subscription_id, resource_group_name)
workspace_resource_id = workspace_resource_id.strip()
if not workspace_resource_id.startswith('/'):
workspace_resource_id = '/' + workspace_resource_id
if workspace_resource_id.endswith('/'):
workspace_resource_id = workspace_resource_id.rstrip('/')
addon_profiles[CONST_MONITORING_ADDON_NAME] = ManagedClusterAddonProfile(
enabled=True, config={CONST_MONITORING_LOG_ANALYTICS_WORKSPACE_RESOURCE_ID: workspace_resource_id})
addons.remove('monitoring')
# error out if '--enable-addons=monitoring' isn't set but workspace_resource_id is
elif workspace_resource_id:
raise CLIError('"--workspace-resource-id" requires "--enable-addons monitoring".')
if 'azure-policy' in addons:
addon_profiles[CONST_AZURE_POLICY_ADDON_NAME] = ManagedClusterAddonProfile(enabled=True)
addons.remove('azure-policy')
if 'virtual-node' in addons:
if not aci_subnet_name or not vnet_subnet_id:
raise CLIError('"--enable-addons virtual-node" requires "--aci-subnet-name" and "--vnet-subnet-id".')
# TODO: how about aciConnectorwindows, what is its addon name?
os_type = 'Linux'
addon_profiles[CONST_VIRTUAL_NODE_ADDON_NAME + os_type] = ManagedClusterAddonProfile(
enabled=True,
config={CONST_VIRTUAL_NODE_SUBNET_NAME: aci_subnet_name}
)
addons.remove('virtual-node')
if 'ingress-appgw' in addons:
addon_profile = ManagedClusterAddonProfile(enabled=True, config={})
if appgw_name is not None:
addon_profile.config[CONST_INGRESS_APPGW_APPLICATION_GATEWAY_NAME] = appgw_name
if appgw_subnet_cidr is not None:
addon_profile.config[CONST_INGRESS_APPGW_SUBNET_CIDR] = appgw_subnet_cidr
if appgw_id is not None:
addon_profile.config[CONST_INGRESS_APPGW_APPLICATION_GATEWAY_ID] = appgw_id
if appgw_subnet_id is not None:
addon_profile.config[CONST_INGRESS_APPGW_SUBNET_ID] = appgw_subnet_id
if appgw_watch_namespace is not None:
addon_profile.config[CONST_INGRESS_APPGW_WATCH_NAMESPACE] = appgw_watch_namespace
addon_profiles[CONST_INGRESS_APPGW_ADDON_NAME] = addon_profile
addons.remove('ingress-appgw')
# error out if any (unrecognized) addons remain
if addons:
raise CLIError('"{}" {} not recognized by the --enable-addons argument.'.format(
",".join(addons), "are" if len(addons) > 1 else "is"))
return addon_profiles
def _install_dev_spaces_extension(cmd, extension_name):
try:
from azure.cli.core.extension import operations
operations.add_extension(cmd=cmd, extension_name=extension_name)
    except Exception: # noqa pylint: disable=broad-except
return False
return True
def _update_dev_spaces_extension(cmd, extension_name, extension_module):
from azure.cli.core.extension import ExtensionNotInstalledException
try:
from azure.cli.core.extension import operations
operations.update_extension(cmd=cmd, extension_name=extension_name)
operations.reload_extension(extension_name=extension_name)
except CLIError as err:
logger.info(err)
except ExtensionNotInstalledException as err:
logger.debug(err)
return False
except ModuleNotFoundError as err:
logger.debug(err)
logger.error("Error occurred attempting to load the extension module. Use --debug for more information.")
return False
return True
def _get_or_add_extension(cmd, extension_name, extension_module, update=False):
from azure.cli.core.extension import (ExtensionNotInstalledException, get_extension)
try:
get_extension(extension_name)
if update:
return _update_dev_spaces_extension(cmd, extension_name, extension_module)
except ExtensionNotInstalledException:
return _install_dev_spaces_extension(cmd, extension_name)
return True
def _ensure_default_log_analytics_workspace_for_monitoring(cmd, subscription_id, resource_group_name):
# mapping for azure public cloud
# log analytics workspaces cannot be created in WCUS region due to capacity limits
# so mapped to EUS per discussion with log analytics team
AzureCloudLocationToOmsRegionCodeMap = {
"australiasoutheast": "ASE",
"australiaeast": "EAU",
"australiacentral": "CAU",
"canadacentral": "CCA",
"centralindia": "CIN",
"centralus": "CUS",
"eastasia": "EA",
"eastus": "EUS",
"eastus2": "EUS2",
"eastus2euap": "EAP",
"francecentral": "PAR",
"japaneast": "EJP",
"koreacentral": "SE",
"northeurope": "NEU",
"southcentralus": "SCUS",
"southeastasia": "SEA",
"uksouth": "SUK",
"usgovvirginia": "USGV",
"westcentralus": "EUS",
"westeurope": "WEU",
"westus": "WUS",
"westus2": "WUS2"
}
AzureCloudRegionToOmsRegionMap = {
"australiacentral": "australiacentral",
"australiacentral2": "australiacentral",
"australiaeast": "australiaeast",
"australiasoutheast": "australiasoutheast",
"brazilsouth": "southcentralus",
"canadacentral": "canadacentral",
"canadaeast": "canadacentral",
"centralus": "centralus",
"centralindia": "centralindia",
"eastasia": "eastasia",
"eastus": "eastus",
"eastus2": "eastus2",
"francecentral": "francecentral",
"francesouth": "francecentral",
"japaneast": "japaneast",
"japanwest": "japaneast",
"koreacentral": "koreacentral",
"koreasouth": "koreacentral",
"northcentralus": "eastus",
"northeurope": "northeurope",
"southafricanorth": "westeurope",
"southafricawest": "westeurope",
"southcentralus": "southcentralus",
"southeastasia": "southeastasia",
"southindia": "centralindia",
"uksouth": "uksouth",
"ukwest": "uksouth",
"westcentralus": "eastus",
"westeurope": "westeurope",
"westindia": "centralindia",
"westus": "westus",
"westus2": "westus2"
}
# mapping for azure china cloud
    # currently Log Analytics is supported only in the China East 2 region
AzureChinaLocationToOmsRegionCodeMap = {
"chinaeast": "EAST2",
"chinaeast2": "EAST2",
"chinanorth": "EAST2",
"chinanorth2": "EAST2"
}
AzureChinaRegionToOmsRegionMap = {
"chinaeast": "chinaeast2",
"chinaeast2": "chinaeast2",
"chinanorth": "chinaeast2",
"chinanorth2": "chinaeast2"
}
    # mapping for azure us government cloud
AzureFairfaxLocationToOmsRegionCodeMap = {
"usgovvirginia": "USGV"
}
AzureFairfaxRegionToOmsRegionMap = {
"usgovvirginia": "usgovvirginia"
}
rg_location = _get_rg_location(cmd.cli_ctx, resource_group_name)
cloud_name = cmd.cli_ctx.cloud.name
workspace_region = "eastus"
workspace_region_code = "EUS"
# sanity check that locations and clouds match.
if ((cloud_name.lower() == 'azurecloud' and AzureChinaRegionToOmsRegionMap.get(rg_location, False)) or
(cloud_name.lower() == 'azurecloud' and AzureFairfaxRegionToOmsRegionMap.get(rg_location, False))):
raise CLIError('Wrong cloud (azurecloud) setting for region {}, please use "az cloud set ..."'
.format(rg_location))
if ((cloud_name.lower() == 'azurechinacloud' and AzureCloudRegionToOmsRegionMap.get(rg_location, False)) or
(cloud_name.lower() == 'azurechinacloud' and AzureFairfaxRegionToOmsRegionMap.get(rg_location, False))):
raise CLIError('Wrong cloud (azurechinacloud) setting for region {}, please use "az cloud set ..."'
.format(rg_location))
if ((cloud_name.lower() == 'azureusgovernment' and AzureCloudRegionToOmsRegionMap.get(rg_location, False)) or
(cloud_name.lower() == 'azureusgovernment' and AzureChinaRegionToOmsRegionMap.get(rg_location, False))):
raise CLIError('Wrong cloud (azureusgovernment) setting for region {}, please use "az cloud set ..."'
.format(rg_location))
if cloud_name.lower() == 'azurecloud':
workspace_region = AzureCloudRegionToOmsRegionMap.get(rg_location, "eastus")
workspace_region_code = AzureCloudLocationToOmsRegionCodeMap.get(workspace_region, "EUS")
elif cloud_name.lower() == 'azurechinacloud':
workspace_region = AzureChinaRegionToOmsRegionMap.get(rg_location, "chinaeast2")
workspace_region_code = AzureChinaLocationToOmsRegionCodeMap.get(workspace_region, "EAST2")
elif cloud_name.lower() == 'azureusgovernment':
workspace_region = AzureFairfaxRegionToOmsRegionMap.get(rg_location, "usgovvirginia")
workspace_region_code = AzureFairfaxLocationToOmsRegionCodeMap.get(workspace_region, "USGV")
else:
workspace_region = rg_location
workspace_region_code = rg_location.upper()
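    # Derive the default workspace resource group and name from the subscription and region code.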
default_workspace_resource_group = 'DefaultResourceGroup-' + workspace_region_code
default_workspace_name = 'DefaultWorkspace-{0}-{1}'.format(subscription_id, workspace_region_code)
default_workspace_resource_id = '/subscriptions/{0}/resourceGroups/{1}/providers/Microsoft.OperationalInsights' \
'/workspaces/{2}'.format(subscription_id, default_workspace_resource_group, default_workspace_name)
resource_groups = cf_resource_groups(cmd.cli_ctx, subscription_id)
resources = cf_resources(cmd.cli_ctx, subscription_id)
# check if default RG exists
if resource_groups.check_existence(default_workspace_resource_group):
try:
resource = resources.get_by_id(default_workspace_resource_id, '2015-11-01-preview')
return resource.id
except CloudError as ex:
if ex.status_code != 404:
raise ex
else:
resource_groups.create_or_update(default_workspace_resource_group, {'location': workspace_region})
default_workspace_params = {
'location': workspace_region,
'properties': {
'sku': {
'name': 'standalone'
}
}
}
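    # Create the default Log Analytics workspace and poll until its resource ID is available.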
async_poller = resources.create_or_update_by_id(default_workspace_resource_id, '2015-11-01-preview',
default_workspace_params)
ws_resource_id = ''
while True:
result = async_poller.result(15)
if async_poller.done():
ws_resource_id = result.id
break
return ws_resource_id
def _ensure_container_insights_for_monitoring(cmd, addon):
# Workaround for this addon key which has been seen lowercased in the wild.
for key in list(addon.config):
if (key.lower() == CONST_MONITORING_LOG_ANALYTICS_WORKSPACE_RESOURCE_ID.lower() and
key != CONST_MONITORING_LOG_ANALYTICS_WORKSPACE_RESOURCE_ID):
addon.config[CONST_MONITORING_LOG_ANALYTICS_WORKSPACE_RESOURCE_ID] = addon.config.pop(key)
workspace_resource_id = addon.config[CONST_MONITORING_LOG_ANALYTICS_WORKSPACE_RESOURCE_ID]
workspace_resource_id = workspace_resource_id.strip()
if not workspace_resource_id.startswith('/'):
workspace_resource_id = '/' + workspace_resource_id
if workspace_resource_id.endswith('/'):
workspace_resource_id = workspace_resource_id.rstrip('/')
# extract subscription ID and resource group from workspace_resource_id URL
try:
subscription_id = workspace_resource_id.split('/')[2]
resource_group = workspace_resource_id.split('/')[4]
except IndexError:
raise CLIError('Could not locate resource group in workspace-resource-id URL.')
# region of workspace can be different from region of RG so find the location of the workspace_resource_id
resources = cf_resources(cmd.cli_ctx, subscription_id)
try:
resource = resources.get_by_id(workspace_resource_id, '2015-11-01-preview')
location = resource.location
except CloudError as ex:
raise ex
unix_time_in_millis = int(
(datetime.datetime.utcnow() - datetime.datetime.utcfromtimestamp(0)).total_seconds() * 1000.0)
solution_deployment_name = 'ContainerInsights-{}'.format(unix_time_in_millis)
# pylint: disable=line-too-long
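    # ARM template that deploys the ContainerInsights solution into the workspace's resource group.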
template = {
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"workspaceResourceId": {
"type": "string",
"metadata": {
"description": "Azure Monitor Log Analytics Resource ID"
}
},
"workspaceRegion": {
"type": "string",
"metadata": {
"description": "Azure Monitor Log Analytics workspace region"
}
},
"solutionDeploymentName": {
"type": "string",
"metadata": {
"description": "Name of the solution deployment"
}
}
},
"resources": [
{
"type": "Microsoft.Resources/deployments",
"name": "[parameters('solutionDeploymentName')]",
"apiVersion": "2017-05-10",
"subscriptionId": "[split(parameters('workspaceResourceId'),'/')[2]]",
"resourceGroup": "[split(parameters('workspaceResourceId'),'/')[4]]",
"properties": {
"mode": "Incremental",
"template": {
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {},
"variables": {},
"resources": [
{
"apiVersion": "2015-11-01-preview",
"type": "Microsoft.OperationsManagement/solutions",
"location": "[parameters('workspaceRegion')]",
"name": "[Concat('ContainerInsights', '(', split(parameters('workspaceResourceId'),'/')[8], ')')]",
"properties": {
"workspaceResourceId": "[parameters('workspaceResourceId')]"
},
"plan": {
"name": "[Concat('ContainerInsights', '(', split(parameters('workspaceResourceId'),'/')[8], ')')]",
"product": "[Concat('OMSGallery/', 'ContainerInsights')]",
"promotionCode": "",
"publisher": "Microsoft"
}
}
]
},
"parameters": {}
}
}
]
}
params = {
"workspaceResourceId": {
"value": workspace_resource_id
},
"workspaceRegion": {
"value": location
},
"solutionDeploymentName": {
"value": solution_deployment_name
}
}
deployment_name = 'aks-monitoring-{}'.format(unix_time_in_millis)
# publish the Container Insights solution to the Log Analytics workspace
return _invoke_deployment(cmd, resource_group, deployment_name, template, params,
validate=False, no_wait=False, subscription_id=subscription_id)
def _ensure_aks_acr(cli_ctx,
client_id,
acr_name_or_id,
subscription_id,
detach=False):
from msrestazure.tools import is_valid_resource_id, parse_resource_id
# Check if the ACR exists by resource ID.
if is_valid_resource_id(acr_name_or_id):
try:
parsed_registry = parse_resource_id(acr_name_or_id)
acr_client = cf_container_registry_service(cli_ctx, subscription_id=parsed_registry['subscription'])
registry = acr_client.registries.get(parsed_registry['resource_group'], parsed_registry['name'])
except CloudError as ex:
raise CLIError(ex.message)
_ensure_aks_acr_role_assignment(cli_ctx, client_id, registry.id, detach)
return
    # Check if the ACR exists by name across all resource groups.
registry_name = acr_name_or_id
registry_resource = 'Microsoft.ContainerRegistry/registries'
try:
registry = get_resource_by_name(cli_ctx, registry_name, registry_resource)
except CloudError as ex:
if 'was not found' in ex.message:
raise CLIError("ACR {} not found. Have you provided the right ACR name?".format(registry_name))
raise CLIError(ex.message)
_ensure_aks_acr_role_assignment(cli_ctx, client_id, registry.id, detach)
return
def aks_agentpool_show(cmd, client, resource_group_name, cluster_name, nodepool_name):
instance = client.get(resource_group_name, cluster_name, nodepool_name)
return instance
def aks_agentpool_list(cmd, client, resource_group_name, cluster_name):
return client.list(resource_group_name, cluster_name)
def aks_agentpool_add(cmd, client, resource_group_name, cluster_name, nodepool_name,
kubernetes_version=None,
zones=None,
enable_node_public_ip=False,
node_vm_size=None,
node_osdisk_type=None,
node_osdisk_size=0,
node_count=3,
vnet_subnet_id=None,
ppg=None,
max_pods=0,
os_type="Linux",
min_count=None,
max_count=None,
enable_cluster_autoscaler=False,
node_taints=None,
priority=CONST_SCALE_SET_PRIORITY_REGULAR,
eviction_policy=CONST_SPOT_EVICTION_POLICY_DELETE,
spot_max_price=float('nan'),
tags=None,
labels=None,
max_surge=None,
mode="User",
no_wait=False):
instances = client.list(resource_group_name, cluster_name)
for agentpool_profile in instances:
if agentpool_profile.name == nodepool_name:
            raise CLIError("Node pool {} already exists, please try a different name, "
                           "use 'az aks nodepool list' to get the current list of node pools".format(nodepool_name))
upgradeSettings = AgentPoolUpgradeSettings()
taints_array = []
if node_taints is not None:
for taint in node_taints.split(','):
try:
taint = taint.strip()
taints_array.append(taint)
except ValueError:
raise CLIError('Taint does not match allowed values. Expect value such as "special=true:NoSchedule".')
if node_vm_size is None:
if os_type.lower() == "windows":
node_vm_size = "Standard_D2s_v3"
else:
node_vm_size = "Standard_DS2_v2"
if max_surge:
upgradeSettings.max_surge = max_surge
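    # Build the agent pool with the requested size, networking, scaling and upgrade settings.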
agent_pool = AgentPool(
name=nodepool_name,
tags=tags,
node_labels=labels,
count=int(node_count),
vm_size=node_vm_size,
os_type=os_type,
vnet_subnet_id=vnet_subnet_id,
proximity_placement_group_id=ppg,
agent_pool_type="VirtualMachineScaleSets",
max_pods=int(max_pods) if max_pods else None,
orchestrator_version=kubernetes_version,
availability_zones=zones,
scale_set_priority=priority,
enable_node_public_ip=enable_node_public_ip,
node_taints=taints_array,
upgrade_settings=upgradeSettings,
mode=mode
)
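    # Spot pools also need an eviction policy and a max price (-1 means up to the on-demand price).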
if priority == CONST_SCALE_SET_PRIORITY_SPOT:
agent_pool.scale_set_eviction_policy = eviction_policy
if isnan(spot_max_price):
spot_max_price = -1
agent_pool.spot_max_price = spot_max_price
_check_cluster_autoscaler_flag(enable_cluster_autoscaler, min_count, max_count, node_count, agent_pool)
if node_osdisk_size:
agent_pool.os_disk_size_gb = int(node_osdisk_size)
if node_osdisk_type:
agent_pool.os_disk_type = node_osdisk_type
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, cluster_name, nodepool_name, agent_pool)
def aks_agentpool_scale(cmd, client, resource_group_name, cluster_name,
nodepool_name,
node_count=3,
no_wait=False):
instance = client.get(resource_group_name, cluster_name, nodepool_name)
new_node_count = int(node_count)
if instance.enable_auto_scaling:
raise CLIError("Cannot scale cluster autoscaler enabled node pool.")
if new_node_count == instance.count:
raise CLIError("The new node count is the same as the current node count.")
instance.count = new_node_count # pylint: disable=no-member
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, cluster_name, nodepool_name, instance)
def aks_agentpool_upgrade(cmd, client, resource_group_name, cluster_name,
nodepool_name,
kubernetes_version='',
node_image_only=False,
max_surge=None,
no_wait=False):
if kubernetes_version != '' and node_image_only:
        raise CLIError('Conflicting flags. Upgrading the Kubernetes version will also upgrade the node image version. '
                       'If you only want to upgrade the node image version, use the "--node-image-only" option.')
if node_image_only:
managed_cluster_client = cf_managed_clusters(cmd.cli_ctx)
return _upgrade_single_nodepool_image_version(no_wait,
managed_cluster_client,
resource_group_name,
cluster_name,
nodepool_name)
instance = client.get(resource_group_name, cluster_name, nodepool_name)
instance.orchestrator_version = kubernetes_version
if not instance.upgrade_settings:
instance.upgrade_settings = AgentPoolUpgradeSettings()
if max_surge:
instance.upgrade_settings.max_surge = max_surge
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, cluster_name, nodepool_name, instance)
def aks_agentpool_update(cmd, client, resource_group_name, cluster_name, nodepool_name,
enable_cluster_autoscaler=False,
disable_cluster_autoscaler=False,
update_cluster_autoscaler=False,
min_count=None, max_count=None,
tags=None,
max_surge=None,
mode=None,
no_wait=False):
update_autoscaler = enable_cluster_autoscaler + disable_cluster_autoscaler + update_cluster_autoscaler
if update_autoscaler > 1:
raise CLIError('Please specify one of "--enable-cluster-autoscaler" or '
'"--disable-cluster-autoscaler" or '
'"--update-cluster-autoscaler"')
if (update_autoscaler == 0 and not tags and not mode and not max_surge):
raise CLIError('Please specify one or more of "--enable-cluster-autoscaler" or '
'"--disable-cluster-autoscaler" or '
'"--update-cluster-autoscaler" or '
'"--tags" or "--mode" or "--max-surge"')
instance = client.get(resource_group_name, cluster_name, nodepool_name)
_validate_autoscaler_update_counts(min_count, max_count, enable_cluster_autoscaler or
update_cluster_autoscaler)
if enable_cluster_autoscaler:
if instance.enable_auto_scaling:
logger.warning('Autoscaler is already enabled for this node pool.\n'
'Please run "az aks nodepool update --update-cluster-autoscaler" '
'if you want to update min-count or max-count.')
return None
instance.min_count = int(min_count)
instance.max_count = int(max_count)
instance.enable_auto_scaling = True
if update_cluster_autoscaler:
if not instance.enable_auto_scaling:
raise CLIError('Autoscaler is not enabled for this node pool.\n'
'Run "az aks nodepool update --enable-cluster-autoscaler" '
'to enable cluster with min-count and max-count.')
instance.min_count = int(min_count)
instance.max_count = int(max_count)
if not instance.upgrade_settings:
instance.upgrade_settings = AgentPoolUpgradeSettings()
if max_surge:
instance.upgrade_settings.max_surge = max_surge
if disable_cluster_autoscaler:
if not instance.enable_auto_scaling:
logger.warning('Autoscaler is already disabled for this node pool.')
return None
instance.enable_auto_scaling = False
instance.min_count = None
instance.max_count = None
instance.tags = tags
if mode is not None:
instance.mode = mode
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, cluster_name, nodepool_name, instance)
def aks_agentpool_delete(cmd, client, resource_group_name, cluster_name,
nodepool_name,
no_wait=False):
agentpool_exists = False
instances = client.list(resource_group_name, cluster_name)
for agentpool_profile in instances:
if agentpool_profile.name.lower() == nodepool_name.lower():
agentpool_exists = True
break
if not agentpool_exists:
        raise CLIError("Node pool {} doesn't exist, "
"use 'aks nodepool list' to get current node pool list".format(nodepool_name))
return sdk_no_wait(no_wait, client.delete, resource_group_name, cluster_name, nodepool_name)
def aks_agentpool_get_upgrade_profile(cmd, client, resource_group_name, cluster_name, nodepool_name):
return client.get_upgrade_profile(resource_group_name, cluster_name, nodepool_name)
def _ensure_aks_acr_role_assignment(cli_ctx,
client_id,
registry_id,
detach=False):
if detach:
if not _delete_role_assignments(cli_ctx,
'acrpull',
client_id,
scope=registry_id):
raise CLIError('Could not delete role assignments for ACR. '
'Are you an Owner on this subscription?')
return
if not _add_role_assignment(cli_ctx,
'acrpull',
client_id,
scope=registry_id):
raise CLIError('Could not create a role assignment for ACR. '
'Are you an Owner on this subscription?')
return
def _ensure_aks_service_principal(cli_ctx,
service_principal=None,
client_secret=None,
subscription_id=None,
dns_name_prefix=None,
location=None,
name=None):
aad_session_key = None
# TODO: This really needs to be unit tested.
rbac_client = get_graph_rbac_management_client(cli_ctx)
if not service_principal:
# --service-principal not specified, make one.
if not client_secret:
client_secret = _create_client_secret()
salt = binascii.b2a_hex(os.urandom(3)).decode('utf-8')
url = 'https://{}.{}.{}.cloudapp.azure.com'.format(salt, dns_name_prefix, location)
service_principal, aad_session_key = _build_service_principal(rbac_client, cli_ctx, name, url, client_secret)
if not service_principal:
raise CLIError('Could not create a service principal with the right permissions. '
'Are you an Owner on this project?')
logger.info('Created a service principal: %s', service_principal)
# We don't need to add role assignment for this created SPN
else:
        # --service-principal specified, validate --client-secret was too
if not client_secret:
raise CLIError('--client-secret is required if --service-principal is specified')
return {
'client_secret': client_secret,
'service_principal': service_principal,
'aad_session_key': aad_session_key,
}
def _ensure_osa_aad(cli_ctx,
aad_client_app_id=None,
aad_client_app_secret=None,
aad_tenant_id=None,
identifier=None,
name=None, create=False,
customer_admin_group_id=None):
rbac_client = get_graph_rbac_management_client(cli_ctx)
if create:
# This reply_url is temporary set since Azure need one to create the AAD.
app_id_name = 'https://{}'.format(name)
if not aad_client_app_secret:
aad_client_app_secret = _create_client_secret()
# Delegate Sign In and Read User Profile permissions on Windows Azure Active Directory API
resource_access = ResourceAccess(id="311a71cc-e848-46a1-bdf8-97ff7156d8e6",
additional_properties=None, type="Scope")
# Read directory permissions on Windows Azure Active Directory API
directory_access = ResourceAccess(id="5778995a-e1bf-45b8-affa-663a9f3f4d04",
additional_properties=None, type="Role")
required_osa_aad_access = RequiredResourceAccess(resource_access=[resource_access, directory_access],
additional_properties=None,
resource_app_id="00000002-0000-0000-c000-000000000000")
list_aad_filtered = list(rbac_client.applications.list(filter="identifierUris/any(s:s eq '{}')"
.format(app_id_name)))
if list_aad_filtered:
aad_client_app_id = list_aad_filtered[0].app_id
# Updating reply_url with the correct FQDN information returned by the RP
reply_url = 'https://{}/oauth2callback/Azure%20AD'.format(identifier)
update_application(client=rbac_client.applications,
object_id=list_aad_filtered[0].object_id,
display_name=name,
identifier_uris=[app_id_name],
reply_urls=[reply_url],
homepage=app_id_name,
password=aad_client_app_secret,
required_resource_accesses=[required_osa_aad_access])
logger.info('Updated AAD: %s', aad_client_app_id)
else:
result, _aad_session_key = create_application(client=rbac_client.applications,
display_name=name,
identifier_uris=[app_id_name],
homepage=app_id_name,
password=aad_client_app_secret,
required_resource_accesses=[required_osa_aad_access])
aad_client_app_id = result.app_id
logger.info('Created an AAD: %s', aad_client_app_id)
# Get the TenantID
if aad_tenant_id is None:
profile = Profile(cli_ctx=cli_ctx)
_, _, aad_tenant_id = profile.get_login_credentials()
return OpenShiftManagedClusterAADIdentityProvider(
client_id=aad_client_app_id,
secret=aad_client_app_secret,
tenant_id=aad_tenant_id,
kind='AADIdentityProvider',
customer_admin_group_id=customer_admin_group_id)
def _ensure_service_principal(cli_ctx,
service_principal=None,
client_secret=None,
subscription_id=None,
dns_name_prefix=None,
location=None,
name=None):
# TODO: This really needs to be unit tested.
rbac_client = get_graph_rbac_management_client(cli_ctx)
if not service_principal:
# --service-principal not specified, make one.
if not client_secret:
client_secret = _create_client_secret()
salt = binascii.b2a_hex(os.urandom(3)).decode('utf-8')
url = 'https://{}.{}.{}.cloudapp.azure.com'.format(salt, dns_name_prefix, location)
service_principal, _aad_session_key = _build_service_principal(rbac_client, cli_ctx, name, url, client_secret)
if not service_principal:
raise CLIError('Could not create a service principal with the right permissions. '
'Are you an Owner on this project?')
logger.info('Created a service principal: %s', service_principal)
# add role first before save it
if not _add_role_assignment(cli_ctx, 'Contributor', service_principal):
logger.warning('Could not create a service principal with the right permissions. '
'Are you an Owner on this project?')
else:
        # --service-principal specified, validate --client-secret was too
if not client_secret:
raise CLIError('--client-secret is required if --service-principal is specified')
return {
'client_secret': client_secret,
'service_principal': service_principal,
}
def _create_client_secret():
# Add a special character to satisfy AAD SP secret requirements
special_char = '$'
client_secret = binascii.b2a_hex(os.urandom(10)).decode('utf-8') + special_char
return client_secret
def _get_rg_location(ctx, resource_group_name, subscription_id=None):
groups = cf_resource_groups(ctx, subscription_id=subscription_id)
# Just do the get, we don't need the result, it will error out if the group doesn't exist.
rg = groups.get(resource_group_name)
return rg.location
def _check_cluster_autoscaler_flag(enable_cluster_autoscaler,
min_count,
max_count,
node_count,
agent_pool_profile):
if enable_cluster_autoscaler:
if min_count is None or max_count is None:
raise CLIError('Please specify both min-count and max-count when --enable-cluster-autoscaler enabled')
if int(min_count) > int(max_count):
raise CLIError('Value of min-count should be less than or equal to value of max-count')
if int(node_count) < int(min_count) or int(node_count) > int(max_count):
raise CLIError('node-count is not in the range of min-count and max-count')
agent_pool_profile.min_count = int(min_count)
agent_pool_profile.max_count = int(max_count)
agent_pool_profile.enable_auto_scaling = True
else:
if min_count is not None or max_count is not None:
raise CLIError('min-count and max-count are required for --enable-cluster-autoscaler, please use the flag')
def _validate_autoscaler_update_counts(min_count, max_count, is_enable_or_update):
"""
Validates the min, max, and node count when performing an update
"""
if min_count is None or max_count is None:
if is_enable_or_update:
raise CLIError('Please specify both min-count and max-count when --enable-cluster-autoscaler or '
'--update-cluster-autoscaler is set.')
if min_count is not None and max_count is not None:
if int(min_count) > int(max_count):
raise CLIError('Value of min-count should be less than or equal to value of max-count.')
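# Illustrative note (added by the editor, not in the original source): the validator only
# checks whatever counts were actually supplied, e.g.
#   _validate_autoscaler_update_counts(1, 5, True)        -> passes (1 <= 5)
#   _validate_autoscaler_update_counts(5, 1, True)        -> CLIError (min > max)
#   _validate_autoscaler_update_counts(None, None, True)  -> CLIError (counts required)
#   _validate_autoscaler_update_counts(None, None, False) -> passes (nothing to validate)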
def _print_or_merge_credentials(path, kubeconfig, overwrite_existing, context_name):
"""Merge an unencrypted kubeconfig into the file at the specified path, or print it to
stdout if the path is "-".
"""
# Special case for printing to stdout
if path == "-":
print(kubeconfig)
return
# ensure that at least an empty ~/.kube/config exists
directory = os.path.dirname(path)
if directory and not os.path.exists(directory):
try:
os.makedirs(directory)
except OSError as ex:
if ex.errno != errno.EEXIST:
raise
if not os.path.exists(path):
with os.fdopen(os.open(path, os.O_CREAT | os.O_WRONLY, 0o600), 'wt'):
pass
# merge the new kubeconfig into the existing one
fd, temp_path = tempfile.mkstemp()
additional_file = os.fdopen(fd, 'w+t')
try:
additional_file.write(kubeconfig)
additional_file.flush()
merge_kubernetes_configurations(path, temp_path, overwrite_existing, context_name)
except yaml.YAMLError as ex:
logger.warning('Failed to merge credentials to kube config file: %s', ex)
finally:
additional_file.close()
os.remove(temp_path)
def _remove_nulls(managed_clusters):
"""
Remove some often-empty fields from a list of ManagedClusters, so the JSON representation
doesn't contain distracting null fields.
This works around a quirk of the SDK for python behavior. These fields are not sent
by the server, but get recreated by the CLI's own "to_dict" serialization.
"""
attrs = ['tags']
ap_attrs = ['os_disk_size_gb', 'vnet_subnet_id']
sp_attrs = ['secret']
for managed_cluster in managed_clusters:
for attr in attrs:
if getattr(managed_cluster, attr, None) is None:
delattr(managed_cluster, attr)
if managed_cluster.agent_pool_profiles is not None:
for ap_profile in managed_cluster.agent_pool_profiles:
for attr in ap_attrs:
if getattr(ap_profile, attr, None) is None:
delattr(ap_profile, attr)
for attr in sp_attrs:
if getattr(managed_cluster.service_principal_profile, attr, None) is None:
delattr(managed_cluster.service_principal_profile, attr)
return managed_clusters
def _remove_osa_nulls(managed_clusters):
"""
Remove some often-empty fields from a list of OpenShift ManagedClusters, so the JSON representation
doesn't contain distracting null fields.
This works around a quirk of the SDK for python behavior. These fields are not sent
by the server, but get recreated by the CLI's own "to_dict" serialization.
"""
attrs = ['tags', 'plan', 'type', 'id']
ap_master_attrs = ['name', 'os_type']
net_attrs = ['peer_vnet_id']
for managed_cluster in managed_clusters:
for attr in attrs:
if hasattr(managed_cluster, attr) and getattr(managed_cluster, attr) is None:
delattr(managed_cluster, attr)
for attr in ap_master_attrs:
if getattr(managed_cluster.master_pool_profile, attr, None) is None:
delattr(managed_cluster.master_pool_profile, attr)
for attr in net_attrs:
if getattr(managed_cluster.network_profile, attr, None) is None:
delattr(managed_cluster.network_profile, attr)
return managed_clusters
def _validate_aci_location(norm_location):
"""
Validate the Azure Container Instance location
"""
aci_locations = [
"australiaeast",
"canadacentral",
"centralindia",
"centralus",
"eastasia",
"eastus",
"eastus2",
"eastus2euap",
"japaneast",
"northcentralus",
"northeurope",
"southcentralus",
"southeastasia",
"southindia",
"uksouth",
"westcentralus",
"westus",
"westus2",
"westeurope"
]
if norm_location not in aci_locations:
raise CLIError('Azure Container Instance is not available at location "{}".'.format(norm_location) +
' The available locations are "{}"'.format(','.join(aci_locations)))
def osa_list(cmd, client, resource_group_name=None):
if resource_group_name:
managed_clusters = client.list_by_resource_group(resource_group_name)
else:
managed_clusters = client.list()
return _remove_osa_nulls(list(managed_clusters))
def _format_workspace_id(workspace_id):
workspace_id = workspace_id.strip()
if not workspace_id.startswith('/'):
workspace_id = '/' + workspace_id
if workspace_id.endswith('/'):
workspace_id = workspace_id.rstrip('/')
return workspace_id
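# Illustrative example (editor's note, the resource ID below is made up):
#   _format_workspace_id(' subscriptions/xxx/resourcegroups/rg/.../workspaces/ws/ ')
# returns '/subscriptions/xxx/resourcegroups/rg/.../workspaces/ws' -- whitespace stripped,
# a leading slash ensured and any trailing slash removed.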
def openshift_create(cmd, client, resource_group_name, name, # pylint: disable=too-many-locals
location=None,
compute_vm_size="Standard_D4s_v3",
compute_count=3,
aad_client_app_id=None,
aad_client_app_secret=None,
aad_tenant_id=None,
vnet_prefix="10.0.0.0/8",
subnet_prefix="10.0.0.0/24",
vnet_peer=None,
tags=None,
no_wait=False,
workspace_id=None,
customer_admin_group_id=None):
logger.warning('Support for the creation of ARO 3.11 clusters ends 30 Nov 2020. Please see aka.ms/aro/4 for information on switching to ARO 4.') # pylint: disable=line-too-long
if location is None:
location = _get_rg_location(cmd.cli_ctx, resource_group_name)
agent_pool_profiles = []
agent_node_pool_profile = OpenShiftManagedClusterAgentPoolProfile(
name='compute', # Must be 12 chars or less before ACS RP adds to it
count=int(compute_count),
vm_size=compute_vm_size,
os_type="Linux",
role=OpenShiftAgentPoolProfileRole.compute,
subnet_cidr=subnet_prefix
)
agent_infra_pool_profile = OpenShiftManagedClusterAgentPoolProfile(
name='infra', # Must be 12 chars or less before ACS RP adds to it
count=int(3),
vm_size="Standard_D4s_v3",
os_type="Linux",
role=OpenShiftAgentPoolProfileRole.infra,
subnet_cidr=subnet_prefix
)
agent_pool_profiles.append(agent_node_pool_profile)
agent_pool_profiles.append(agent_infra_pool_profile)
agent_master_pool_profile = OpenShiftManagedClusterAgentPoolProfile(
name='master', # Must be 12 chars or less before ACS RP adds to it
count=int(3),
vm_size="Standard_D4s_v3",
os_type="Linux",
subnet_cidr=subnet_prefix
)
identity_providers = []
create_aad = False
# Validating if the cluster is not existing since we are not supporting the AAD rotation on OSA for now
try:
client.get(resource_group_name, name)
except CloudError:
# Validating if aad_client_app_id aad_client_app_secret aad_tenant_id are set
if aad_client_app_id is None and aad_client_app_secret is None and aad_tenant_id is None:
create_aad = True
osa_aad_identity = _ensure_osa_aad(cmd.cli_ctx,
aad_client_app_id=aad_client_app_id,
aad_client_app_secret=aad_client_app_secret,
aad_tenant_id=aad_tenant_id, identifier=None,
name=name, create=create_aad,
customer_admin_group_id=customer_admin_group_id)
identity_providers.append(
OpenShiftManagedClusterIdentityProvider(
name='Azure AD',
provider=osa_aad_identity
)
)
auth_profile = OpenShiftManagedClusterAuthProfile(identity_providers=identity_providers)
default_router_profile = OpenShiftRouterProfile(name='default')
if vnet_peer is not None:
from msrestazure.tools import is_valid_resource_id, resource_id
if not is_valid_resource_id(vnet_peer):
vnet_peer = resource_id(
subscription=get_subscription_id(cmd.cli_ctx),
resource_group=resource_group_name,
namespace='Microsoft.Network', type='virtualNetwork',
name=vnet_peer
)
if workspace_id is not None:
workspace_id = _format_workspace_id(workspace_id)
monitor_profile = OpenShiftManagedClusterMonitorProfile(enabled=True, workspace_resource_id=workspace_id) # pylint: disable=line-too-long
else:
monitor_profile = None
network_profile = NetworkProfile(vnet_cidr=vnet_prefix, peer_vnet_id=vnet_peer)
osamc = OpenShiftManagedCluster(
location=location, tags=tags,
open_shift_version="v3.11",
network_profile=network_profile,
auth_profile=auth_profile,
agent_pool_profiles=agent_pool_profiles,
master_pool_profile=agent_master_pool_profile,
router_profiles=[default_router_profile],
monitor_profile=monitor_profile)
try:
# long_running_operation_timeout=300
result = sdk_no_wait(no_wait, client.create_or_update,
resource_group_name=resource_group_name, resource_name=name, parameters=osamc)
result = LongRunningOperation(cmd.cli_ctx)(result)
instance = client.get(resource_group_name, name)
_ensure_osa_aad(cmd.cli_ctx,
aad_client_app_id=osa_aad_identity.client_id,
aad_client_app_secret=osa_aad_identity.secret,
aad_tenant_id=osa_aad_identity.tenant_id, identifier=instance.public_hostname,
name=name, create=create_aad)
except CloudError as ex:
if "The resource type could not be found in the namespace 'Microsoft.ContainerService" in ex.message:
raise CLIError('Please make sure your subscription is whitelisted to use this service. https://aka.ms/openshift/managed') # pylint: disable=line-too-long
if "No registered resource provider found for location" in ex.message:
raise CLIError('Please make sure your subscription is whitelisted to use this service. https://aka.ms/openshift/managed') # pylint: disable=line-too-long
raise ex
def openshift_show(cmd, client, resource_group_name, name):
logger.warning('Support for existing ARO 3.11 clusters ends June 2022. Please see aka.ms/aro/4 for information on switching to ARO 4.') # pylint: disable=line-too-long
mc = client.get(resource_group_name, name)
return _remove_osa_nulls([mc])[0]
def openshift_scale(cmd, client, resource_group_name, name, compute_count, no_wait=False):
logger.warning('Support for existing ARO 3.11 clusters ends June 2022. Please see aka.ms/aro/4 for information on switching to ARO 4.') # pylint: disable=line-too-long
instance = client.get(resource_group_name, name)
# TODO: change this approach when we support multiple agent pools.
idx = 0
for i in range(len(instance.agent_pool_profiles)):
if instance.agent_pool_profiles[i].name.lower() == "compute":
idx = i
break
instance.agent_pool_profiles[idx].count = int(compute_count) # pylint: disable=no-member
    # null out the AAD profile and manually set the master agent pool name, otherwise validation complains
instance.master_pool_profile.name = "master"
instance.auth_profile = None
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, name, instance)
def openshift_monitor_enable(cmd, client, resource_group_name, name, workspace_id, no_wait=False):
logger.warning('Support for existing ARO 3.11 clusters ends June 2022. Please see aka.ms/aro/4 for information on switching to ARO 4.') # pylint: disable=line-too-long
instance = client.get(resource_group_name, name)
workspace_id = _format_workspace_id(workspace_id)
monitor_profile = OpenShiftManagedClusterMonitorProfile(enabled=True, workspace_resource_id=workspace_id) # pylint: disable=line-too-long
instance.monitor_profile = monitor_profile
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, name, instance)
def openshift_monitor_disable(cmd, client, resource_group_name, name, no_wait=False):
logger.warning('Support for existing ARO 3.11 clusters ends June 2022. Please see aka.ms/aro/4 for information on switching to ARO 4.') # pylint: disable=line-too-long
instance = client.get(resource_group_name, name)
monitor_profile = OpenShiftManagedClusterMonitorProfile(enabled=False, workspace_resource_id=None) # pylint: disable=line-too-long
instance.monitor_profile = monitor_profile
return sdk_no_wait(no_wait, client.create_or_update, resource_group_name, name, instance)
| 45.860599 | 222 | 0.650339 |
6fcb4244480f24d2f1e653715e0c74f7642ab8c1 | 1,159 | py | Python | data/p2DJ/New/R2/benchmark/startPyquil189.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | ["BSD-3-Clause"] | null | null | null | data/p2DJ/New/R2/benchmark/startPyquil189.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | ["BSD-3-Clause"] | null | null | null | data/p2DJ/New/R2/benchmark/startPyquil189.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | ["BSD-3-Clause"] | null | null | null |
# qubit number=2
# total number=12
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
prog = Program() # circuit begin
prog += H(0) # number=1
prog += X(1) # number=2
prog += CNOT(0,1) # number=4
prog += H(1) # number=9
prog += CZ(0,1) # number=10
prog += H(1) # number=11
prog += X(1) # number=8
prog += X(1) # number=5
prog += CNOT(0,1) # number=6
# circuit end
return prog
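# Editor's note (not part of the generated benchmark): the H(1); CZ(0,1); H(1) block above
# is the textbook decomposition of CNOT(0,1), since Hadamards on the target turn a CZ into a CNOT.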
def summrise_results(bitstrings) -> dict:
d = {}
for l in bitstrings:
if d.get(l) is None:
d[l] = 1
else:
d[l] = d[l] + 1
return d
if __name__ == '__main__':
prog = make_circuit()
qvm = get_qc('1q-qvm')
results = qvm.run_and_measure(prog,1024)
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil189.csv","w")
print(summrise_results(bitstrings),file=writefile)
writefile.close()
| 22.288462 | 64 | 0.603969 |
95a8324d4aa05cf260cdf71829cd9f559f201db5 | 2,619 | py | Python | SarvLibrary/ErrorCorrection/bloocoo/test/bloocootest/libs/TestExec.py | cwright7101/llvm_sarvavid | 7567d617a7be78fecfde71ab04ebd8e9506a64e4 | ["MIT"] | null | null | null | SarvLibrary/ErrorCorrection/bloocoo/test/bloocootest/libs/TestExec.py | cwright7101/llvm_sarvavid | 7567d617a7be78fecfde71ab04ebd8e9506a64e4 | ["MIT"] | null | null | null | SarvLibrary/ErrorCorrection/bloocoo/test/bloocootest/libs/TestExec.py | cwright7101/llvm_sarvavid | 7567d617a7be78fecfde71ab04ebd8e9506a64e4 | ["MIT"] | null | null | null |
import os
from os import listdir
from os.path import exists, isfile, join, basename, splitext, getsize
import shutil
from math import ceil
from TestReadCorrection import *
#-------------------------------------------------------------
# * setup_test
#
#Prepare the tests for a new genome
#Currently this method:
# - determine the genome size
# - determine the number of reads needed depending on the cover
#-------------------------------------------------------------
def setup_test(p):
if p["genome_filename"] != "":
p["genome_size"] = getsize(join("../../genomes", p["genome_filename"]))
#Calc the reads_count depending of cover
#offset = (p["reads_size"]-p["kmer_size"]+1) / float(p["reads_size"])
#p["reads_count"] = int(ceil((p["cover"]*p["genome_size"]) / (p["reads_size"]*offset)))
p["reads_count"] = (p["cover"]*p["genome_size"]) / p["reads_size"]
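# Worked example (editor's note, the numbers are illustrative): with cover=30,
# genome_size=5000000 and reads_size=100, reads_count = 30*5000000/100 = 1500000.
# Note that this division yields a float under Python 3, unlike the commented-out
# ceil() variant above, which returned an int.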
#-------------------------------------------------------------
# * execute_test
#
#function for running a test, taking the configurable variables as arguments
#-------------------------------------------------------------
def execute_test(p):
TestReadCorrection.main(p)
#if p["regenerate_reads"]:
# regen = "regen"
#else:
# regen = "no_regen"
#os.system("python ../../main.py " + p["result_filename_prefix"] + " " + str(p["genome_size"]) + " " + str(p["reads_size"]) +\
#" " + str(p["reads_count"]) + " " + str(p["kmer_size"]) + " " + str(p["coverage_threshold"]) + " " +\
#str(p["error_rate"]) + " " + str(p["nb_kmer_checked"]) + " " + regen + " " + p["genome_filename"])
#-------------------------------------------------------------
# * execute_graph
#
#function for creating a graph, using as input
#the last tabbed file created by the successive calls to execute_test()
#-------------------------------------------------------------
def execute_graph(p):
if not exists("test_result/tabs"):
os.mkdir("test_result/tabs")
if not exists("test_result/graphs"):
os.mkdir("test_result/graphs")
#--- get the last result file added
tab_filename = None
filenames = listdir("test_result")
for filename in filenames:
complete_filename = join("test_result", filename)
if isfile(complete_filename):
tab_filename = complete_filename
break
graph_filename = splitext(basename(tab_filename))[0] + ".png"
graph_filename = join("test_result/graphs", graph_filename)
#---
command = "Rscript " + join("../../Rscripts", p["R_script_filename"]) + " " + tab_filename + " " + graph_filename
os.system(command)
#--- Move the last result file to tabs dir
shutil.move(tab_filename, "test_result/tabs")
| 34.92 | 127 | 0.589156 |
8b8f948a83bdde3179cb1b92e1ec2d43efd480cc | 10,236 | py | Python | bootstrap/p1.5.0/src/operations/tenant_validator.py | apurwaj2/df-on-k8s | 1aecb0bc293d008c5a2384df32ad434bfcc51caa | ["Apache-2.0"] | null | null | null | bootstrap/p1.5.0/src/operations/tenant_validator.py | apurwaj2/df-on-k8s | 1aecb0bc293d008c5a2384df32ad434bfcc51caa | ["Apache-2.0"] | null | null | null | bootstrap/p1.5.0/src/operations/tenant_validator.py | apurwaj2/df-on-k8s | 1aecb0bc293d008c5a2384df32ad434bfcc51caa | ["Apache-2.0"] | null | null | null |
import os
import subprocess
import base64
import time
from common.mapr_exceptions.ex import NotFoundException
from common.mapr_logger.log import Log
from common.const import Constants
from operations.operationsbase import OperationsBase
from operations.yamlfile import YamlFile
# Purpose:
# functions in this class help stand-up
# the Tenant infrastructure
#
class TenantValidator(OperationsBase):
MYDIR = os.path.abspath(os.path.dirname(__file__))
def __init__(self):
super(TenantValidator, self).__init__()
self.tenant_dir = os.path.abspath(os.path.join(self.prereq_dir, "system-tenant-validator"))
if not os.path.exists(self.tenant_dir):
raise NotFoundException(self.tenant_dir)
self.load_yaml_dict()
def load_yaml_dict(self):
file_name = self.check_exists(self.tenant_dir, "tenantvalid-csr.yaml")
tenantvalidator_csr_yaml_file = YamlFile("tenantvalidator-csr", "Tenant Validator Service CSR", file_name,
"tenantvalidator_csr_component", True, True)
self.yamls.append(tenantvalidator_csr_yaml_file)
file_name = self.check_exists(self.tenant_dir, "tenantvalid-sa.yaml")
tenantvalidator_sa_yaml_file = YamlFile("tenantvalidator-sa", "Tenant Validator Service Account", file_name,
"tenantvalidator_components", True)
self.yamls.append(tenantvalidator_sa_yaml_file)
file_name = self.check_exists(self.tenant_dir, "tenantvalid-cr.yaml")
tenantvalidator_cr_yaml_file = YamlFile("tenantvalidator-cr", "Tenant Validator Cluster Role", file_name,
"tenantvalidator_components", True)
self.yamls.append(tenantvalidator_cr_yaml_file)
file_name = self.check_exists(self.tenant_dir, "tenantvalid-crb.yaml")
tenantvalidator_crb_yaml_file = YamlFile("tenantvalidator-crb", "Tenant Validator Cluster Role Binding",
file_name, "tenantvalidator_components", True)
self.yamls.append(tenantvalidator_crb_yaml_file)
file_name = self.check_exists(self.tenant_dir, "tenantvalid-deploy.yaml")
tenantvalidator_deployment_yaml_file = YamlFile("tenantvalidator-deployment", "Tenant Validator Deployment",
file_name, "tenantvalidator_deployemt_component", True, True)
self.yamls.append(tenantvalidator_deployment_yaml_file)
file_name = self.check_exists(self.tenant_dir, "tenantvalid-svc.yaml")
tenantvalidator_service_yaml_file = YamlFile("tenantvalidator-service", "Tenant Validator Service", file_name,
"tenantvalidator_components", True)
self.yamls.append(tenantvalidator_service_yaml_file)
file_name = self.check_exists(self.tenant_dir, "tenantvalid-mwhconfig.yaml")
tenantvalidator_mwc_yaml_file = YamlFile("tenantvalidator-mwc",
"Tenant Validator Mutating Admission Controller", file_name,
"tenantvalidator_components", True)
self.yamls.append(tenantvalidator_mwc_yaml_file)
file_name = self.check_exists(self.tenant_dir, "tenantvalid-vwhconfig.yaml")
tenantvalidator_vwc_yaml_file = YamlFile("tenantvalidator-vwc",
"Tenant Validator Validating Admission Controller", file_name,
"tenantvalidator_components", True)
self.yamls.append(tenantvalidator_vwc_yaml_file)
def update_replace_yaml(self):
# update yaml substitution
with open(self.dfile('cert')) as readfile:
file_contents = readfile.read()
data_bytes = file_contents.encode("utf-8")
OperationsBase.replace_dict["{tenantvalidator-csr-data}"] = base64.b64encode(data_bytes)
def genservicecert(self):
Log.info(os.linesep + "Generating Tenant Validator Service Cert...", stdout=True)
Log.info("Generating new self-signed cert...")
os.chdir(self.MYDIR)
if not os.path.exists('services'):
os.mkdir('services')
if not os.path.exists(self.dfile('key')):
self.openssl('genrsa', '-out', self.dfile('key'), str(Constants.KEY_SIZE))
config = open(self.dfile('config'), 'w')
config.write(Constants.OPENSSL_CONFIG_TEMPLATE % {'service': 'tenant-validator-svc',
'namespace': 'hpe-system'})
config.close()
self.openssl('req', '-new', '-key', self.dfile('key'), '-out', self.dfile('cert'),
'-config', self.dfile('config'))
return True
def genk8csr(self):
# clean-up any previously created CSR for our service. Ignore errors if not present.
uninstallable_yaml_types = ["tenantvalidator_csr_component"]
self.uninstall_components(uninstallable_yaml_types=uninstallable_yaml_types)
# create new csr
installable_yaml_types = ["tenantvalidator_csr_component"]
self.install_components(installable_yaml_types=installable_yaml_types, upgrade_mode=False)
# approve and fetch the signed certificate/pkg/k8client
Log.info(os.linesep + "Approving the Tenant Validator Service CSR...", True)
if self.run_kubectl_certificate("approve tenant-validator-svc.hpe-system"):
Log.info(os.linesep + "Approved the Tenant Validator Service CSR.")
# this is to try and avoid the failures we've been seeing, perhaps time related
# verify CSR has been created
for x in range(3):
encoded_server_cert = self.run_kubectl_get("csr tenant-validator-svc.hpe-system -o"
"jsonpath={.status.certificate}")
if not encoded_server_cert:
Log.error("After approving the Tenant Validator Service CSR, the signed certificate did not "
"appear on the resource.")
return False
elif encoded_server_cert == "<no response>":
Log.info("After approving the Tenant Validator Service CSR, was not able to get a response "
"back from the API server for it. Attempt " + str((x+1)) + " of 3", True)
time.sleep(2)
if not encoded_server_cert:
Log.error("After approving the Tenant Validator Service CSR, the signed certificate did not "
"appear on the resource.")
return False
elif encoded_server_cert == "<no response>":
Log.info("After approving the Tenant Validator Service CSR, was not able to get a response "
"back from the API server for it in all 3 attempts")
else:
Log.info("Verified the Tenant Validator Service CSR was signed.")
# Invalid base64-encoded string: number of data characters (1) cannot be 1 more than a multiple of 4
encoded_server_cert_len = len(encoded_server_cert)
cert_adjusted_len = 32 - (encoded_server_cert_len % 32)
repeated_equals = "=" * cert_adjusted_len
adjusted_cert = "{0}{1}".format(encoded_server_cert, repeated_equals)
decoded_cert = base64.b64decode(adjusted_cert)
file1 = open(self.dfile('csrcert'), 'wb')
file1.write(decoded_cert)
file1.close()
OperationsBase.replace_dict["{tenantvalidator-servercert-encoded}"] = encoded_server_cert
return True
def run_install(self, upgrade_mode=False):
# CERTIFICATE
self.genservicecert()
self.update_replace_yaml()
# CERTIFICATE SIGNING REQUEST
self.genk8csr()
# CERTS SECRET
Log.info(os.linesep + "Deleting previously created Tenant Validator Service certs secret...")
self.delete_tenantvalidator_secret()
Log.info(os.linesep + "Creating Tenant Validator Service Certs Secret ...", True)
if self.create_tenantvalidator_secret(self.dfile('key'), self.dfile('csrcert')):
Log.info("Created Tenant Validator Service certs secret.")
uninstallable_yaml_types = ["tenantvalidator_deployemt_component"]
self.uninstall_components(uninstallable_yaml_types=uninstallable_yaml_types)
installable_yaml_types = ["tenantvalidator_components", "tenantvalidator_deployemt_component"]
self.install_components(installable_yaml_types=installable_yaml_types, upgrade_mode=upgrade_mode)
return True
def run_uninstall(self):
uninstallable_yaml_types = ["tenantvalidator_components", "tenantvalidator_csr_component",
"tenantvalidator_deployemt_component"]
self.uninstall_components(uninstallable_yaml_types=uninstallable_yaml_types)
# SECRET
Log.info(os.linesep + "Deleting Tenant Validator Service certs Secret...", True)
self.delete_tenantvalidator_secret()
Log.info("Deleted Tenant Validator Service certs Secret.")
return True
def create_tenantvalidator_secret(self, key_file, cert_file):
cmd = 'kubectl create secret generic tenant-validator-certs -n hpe-system ' \
'--from-file=key.pem={0} ' \
'--from-file=cert.pem={1} '.format(key_file, cert_file)
return self._run(cmd)
def delete_tenantvalidator_secret(self):
cmd = 'kubectl delete secret tenant-validator-certs -n hpe-system --ignore-not-found'
return self._run(cmd)
# Helper used to put together filenames for certs
@staticmethod
def dfile(ext):
return os.path.join('services', '%s.%s' % ('tenant-validator-svc', ext))
@staticmethod
def openssl(*args):
cmdline = [Constants.OPENSSL] + list(args)
fnull = open(os.devnull, 'w')
subprocess.check_call(cmdline, stdout=fnull, stderr=subprocess.STDOUT)
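# Editor's note (illustrative, not in the original file): dfile() just joins the fixed
# service name onto the 'services' directory, e.g. dfile('key') -> 'services/tenant-validator-svc.key'
# and dfile('cert') -> 'services/tenant-validator-svc.cert', while openssl() shells out to the
# binary named in Constants.OPENSSL with the given arguments.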
| 52.762887 | 118 | 0.644295 |
a7c5bb4f3f5a06488252b6c7ae19b079455557d0 | 2,492 | py | Python | scripts/hiddenapi/signature_patterns.py | t-head-aosp/platform-build-soong | 582fc2d1dde6c70687e6a0bea192f2a2ef67bbd5 | ["Apache-2.0"] | null | null | null | scripts/hiddenapi/signature_patterns.py | t-head-aosp/platform-build-soong | 582fc2d1dde6c70687e6a0bea192f2a2ef67bbd5 | ["Apache-2.0"] | null | null | null | scripts/hiddenapi/signature_patterns.py | t-head-aosp/platform-build-soong | 582fc2d1dde6c70687e6a0bea192f2a2ef67bbd5 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
#
# Copyright (C) 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Generate a set of signature patterns from the modular flags generated by a
bootclasspath_fragment that can be used to select a subset of monolithic flags
against which the modular flags can be compared.
"""
import argparse
import csv
import sys
def dict_reader(input):
return csv.DictReader(input, delimiter=',', quotechar='|', fieldnames=['signature'])
def produce_patterns_from_file(file):
with open(file, 'r') as f:
return produce_patterns_from_stream(f)
def produce_patterns_from_stream(stream):
# Read in all the signatures into a list and remove member names.
patterns = set()
for row in dict_reader(stream):
signature = row['signature']
text = signature.removeprefix("L")
# Remove the class specific member signature
pieces = text.split(";->")
qualifiedClassName = pieces[0]
# Remove inner class names as they cannot be separated from the containing outer class.
pieces = qualifiedClassName.split("$", maxsplit=1)
pattern = pieces[0]
patterns.add(pattern)
patterns = list(patterns)
patterns.sort()
return patterns
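# Illustrative example (editor's note): a stub-flags row such as
#   Lcom/example/Foo$Bar;->baz()V,blocked
# reduces to the class-level pattern 'com/example/Foo' -- the leading 'L' is stripped,
# everything after ';->' is dropped, and the '$Bar' inner-class suffix is removed before
# the patterns are de-duplicated and sorted.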
def main(args):
args_parser = argparse.ArgumentParser(description='Generate a set of signature patterns that select a subset of monolithic hidden API files.')
args_parser.add_argument('--flags', help='The stub flags file which contains an entry for every dex member')
args_parser.add_argument('--output', help='Generated signature prefixes')
args = args_parser.parse_args(args)
# Read in all the patterns into a list.
patterns = produce_patterns_from_file(args.flags)
# Write out all the patterns.
with open(args.output, 'w') as outputFile:
for pattern in patterns:
outputFile.write(pattern)
outputFile.write("\n")
if __name__ == "__main__":
main(sys.argv[1:])
| 37.19403 | 146 | 0.713483 |
5ba66e2cdd66ae27f4d45c3e4665dc515a693dff | 715 | py | Python | mergify_engine/tests/__init__.py | Madhu-1/mergify-engine | 9ca4f4697cc825230b1584f5587f10393cabc971 | ["Apache-2.0"] | null | null | null | mergify_engine/tests/__init__.py | Madhu-1/mergify-engine | 9ca4f4697cc825230b1584f5587f10393cabc971 | ["Apache-2.0"] | null | null | null | mergify_engine/tests/__init__.py | Madhu-1/mergify-engine | 9ca4f4697cc825230b1584f5587f10393cabc971 | ["Apache-2.0"] | null | null | null |
# -*- encoding: utf-8 -*-
#
# Copyright © 2019 Mehdi Abaakouk <sileht@sileht.net>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from datadog import statsd
statsd.socket = mock.Mock()
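# Editor's note: replacing statsd.socket with a Mock keeps the test suite from opening a
# real UDP socket, so no metrics are emitted to a live DogStatsD agent during tests.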
| 29.791667 | 75 | 0.746853 |
9d188c7738bdd426cadea81ee82b27e3c5f2b676 | 371 | py | Python | openprescribing/frontend/migrations/0008_auto_20161121_1236.py | annapowellsmith/openpresc | cfa9fb07d6fc2ee304159c04fcc132cefcf78745 | ["MIT"] | 91 | 2015-10-14T09:10:32.000Z | 2022-03-10T22:09:21.000Z | openprescribing/frontend/migrations/0008_auto_20161121_1236.py | annapowellsmith/openpresc | cfa9fb07d6fc2ee304159c04fcc132cefcf78745 | ["MIT"] | 1,828 | 2015-12-04T14:52:27.000Z | 2022-03-31T08:51:14.000Z | openprescribing/frontend/migrations/0008_auto_20161121_1236.py | HDRUK/openprescribing | 510e8c07e841cd42284c109774d1730b6463f376 | ["MIT"] | 27 | 2015-12-03T18:26:56.000Z | 2021-01-09T21:58:53.000Z |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-11-21 12:36
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('frontend', '0007_add_cost_per_fields'),
]
operations = [
migrations.RemoveField(
model_name='prescription',
name='net_cost',
),
]
| 18.55 | 49 | 0.592992 |
d55d0b76ae528252498e5b6855900fd8d7f1c19d | 875 | py | Python | RAT.py | HugoMskn/Telegram-RAT | 53989b2509b1c844c6a33f670aece5f8dbf15305 | ["MIT"] | 375 | 2020-03-17T06:20:50.000Z | 2022-03-29T22:27:23.000Z | RAT.py | HugoMskn/Telegram-RAT | 53989b2509b1c844c6a33f670aece5f8dbf15305 | ["MIT"] | 44 | 2020-04-06T22:37:59.000Z | 2020-11-15T15:53:39.000Z | RAT.py | HugoMskn/Telegram-RAT | 53989b2509b1c844c6a33f670aece5f8dbf15305 | ["MIT"] | 173 | 2020-04-01T17:17:26.000Z | 2022-03-24T13:28:15.000Z |
from API import *
# Token/ID
TelegramToken = 'TOKEN'
TelegramChatID = 'ID'
# Run the script as administrator
AdminRightsRequired = False
# Disable Task Manager at first start
DisableTaskManager = False
# Disable Registry Editor at first start
DisableRegistryTools = False
# Process protection from termination and deletion
ProcessBSODProtectionEnabled = False
# Add to startup at first start
AutorunEnabled = False
# Installation directory
InstallPath = 'C:\\ProgramData\\'
# Task name in Task Scheduler
AutorunName = 'OneDrive Update'
# The name of the process in the Task Manager
ProcessName = 'System.exe'
# Display a message at first start
DisplayMessageBox = False
# Your Message (will be displayed at start)
Message = 'Message'
# Directory for saving trojan temporary files
Directory = 'C:\\Windows\\Temp\\TelegramRAT\\'
| 23.026316 | 51 | 0.740571 |
da1a5a9cb5d31889229861bd19300a36b4dbc317 | 1,122 | py | Python | datacombine/datacombine/migrations/0004_auto_20170828_0145.py | Crimson-Star-Software/data-combine | 3209ae2316afc38417e51c3261494d6e7d2e4e2a | ["MIT"] | null | null | null | datacombine/datacombine/migrations/0004_auto_20170828_0145.py | Crimson-Star-Software/data-combine | 3209ae2316afc38417e51c3261494d6e7d2e4e2a | ["MIT"] | 3 | 2020-02-11T23:14:53.000Z | 2021-06-10T18:32:57.000Z | datacombine/datacombine/migrations/0004_auto_20170828_0145.py | Crimson-Star-Software/data-combine | 3209ae2316afc38417e51c3261494d6e7d2e4e2a | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-28 01:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('datacombine', '0003_auto_20170826_1452'),
]
operations = [
migrations.AlterField(
model_name='address',
name='cc_id',
field=models.CharField(max_length=36, unique=True),
),
migrations.AlterField(
model_name='constantcontactlist',
name='cc_id',
field=models.IntegerField(unique=True),
),
migrations.AlterField(
model_name='contact',
name='cc_id',
field=models.IntegerField(unique=True),
),
migrations.AlterField(
model_name='emailaddress',
name='cc_id',
field=models.CharField(max_length=36, unique=True),
),
migrations.AlterField(
model_name='note',
name='cc_id',
field=models.CharField(max_length=36, unique=True),
),
]
| 27.365854 | 63 | 0.57041 |
5baecd15c357cc3e7d4057cf878fddcdbf32eb36 | 2,591 | py | Python | rasa/core/channels/callback.py | Tao2301230/rasa_learn | 50093cbc696ee72fec81ab69d74a80399c6277ca | ["Apache-2.0"] | 1 | 2020-09-23T11:04:38.000Z | 2020-09-23T11:04:38.000Z | rasa/core/channels/callback.py | Tao2301230/rasa_learn | 50093cbc696ee72fec81ab69d74a80399c6277ca | ["Apache-2.0"] | null | null | null | rasa/core/channels/callback.py | Tao2301230/rasa_learn | 50093cbc696ee72fec81ab69d74a80399c6277ca | ["Apache-2.0"] | null | null | null |
import logging
from typing import Text, Dict, Optional, Callable, Awaitable, Any
from sanic import Blueprint, response
from sanic.request import Request
from rasa.core.channels.channel import (
CollectingOutputChannel,
UserMessage,
InputChannel,
)
from rasa.core.channels.rest import RestInput
from rasa.utils.endpoints import EndpointConfig, ClientResponseError
from sanic.response import HTTPResponse
logger = logging.getLogger(__name__)
class CallbackOutput(CollectingOutputChannel):
@classmethod
def name(cls) -> Text:
return "callback"
def __init__(self, endpoint: EndpointConfig) -> None:
self.callback_endpoint = endpoint
super().__init__()
async def _persist_message(self, message: Dict[Text, Any]) -> None:
await super()._persist_message(message)
try:
await self.callback_endpoint.request("post", content_type="application/json", json=message)
except ClientResponseError as e:
logger.error("Failed to send output message to callback. Status: {} Response: {}" "".format(e.status, e.text))
class CallbackInput(RestInput):
"""A custom REST http input channel that responds using a callback server.
Incoming messages are received through a REST interface. Responses
are sent asynchronously by calling a configured external REST endpoint."""
@classmethod
def name(cls) -> Text:
return "callback"
@classmethod
def from_credentials(cls, credentials: Optional[Dict[Text, Any]]) -> InputChannel:
return cls(EndpointConfig.from_dict(credentials))
def __init__(self, endpoint: EndpointConfig) -> None:
self.callback_endpoint = endpoint
def blueprint(self, on_new_message: Callable[[UserMessage], Awaitable[Any]]) -> Blueprint:
callback_webhook = Blueprint("callback_webhook", __name__)
@callback_webhook.route("/", methods=["GET"])
async def health(_: Request):
return response.json({"status": "ok"})
@callback_webhook.route("/webhook", methods=["POST"])
async def webhook(request: Request) -> HTTPResponse:
sender_id = await self._extract_sender(request)
text = self._extract_message(request)
collector = self.get_output_channel()
await on_new_message(UserMessage(text, collector, sender_id, input_channel=self.name()))
return response.text("success")
return callback_webhook
def get_output_channel(self) -> CollectingOutputChannel:
return CallbackOutput(self.callback_endpoint)
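# Illustrative configuration sketch (editor's note; the URL is made up): from_credentials()
# builds the EndpointConfig from the channel's credentials entry, e.g.
#   callback:
#     url: "https://example.com/bot"
# Messages POSTed to this channel's /webhook route are acknowledged with a plain "success",
# and the collected bot responses are POSTed back to that URL as JSON by CallbackOutput.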
| 34.546667 | 122 | 0.702817 |
30f73567e8bbdd640429b343ab1bd599c603702f | 3,058 | py | Python | noise_histo.py | mazpar/G305-paper-2 | 5afcd372de9819fc6a9b816f13c6b14bfc50d3d7 | ["MIT"] | null | null | null | noise_histo.py | mazpar/G305-paper-2 | 5afcd372de9819fc6a9b816f13c6b14bfc50d3d7 | ["MIT"] | null | null | null | noise_histo.py | mazpar/G305-paper-2 | 5afcd372de9819fc6a9b816f13c6b14bfc50d3d7 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 22 12:09:31 2021
Draw histogram of noise distribution in fits map
@author: pmazumdar
"""
from astropy.io import fits
import numpy as np
import matplotlib.pyplot as plt
import aplpy
import seaborn as sns
import scipy.stats as sst
#filepath = "/home/pmazumdar/Documents/LASMA/Reduction/class_maps/temp/"
filepath = "/home/pmazumdar/Documents/LASMA/Reduction/datacubes/G346/lmv_maps/"
#Load the FITS files
#noise_hdu_12 = fits.open(filepath+"G305_12CO_noise.fits")[0]
#noise_hdu_13 = fits.open(filepath+"G305_13CO_noise.fits")[0]
noise_hdu_12 = fits.open(filepath+"G346_12CO_noise.fits")[0]
noise_hdu_13 = fits.open(filepath+"G346_13CO_noise.fits")[0]
noise_hdu_SED = fits.open(filepath+"G346_SEDIGISM_13CO21_noise.fits")[0]
#Flatten the dataset
noise_12 = noise_hdu_12.data.flatten()
noise_13 = noise_hdu_13.data.flatten()
noise_SED = noise_hdu_SED.data.flatten()
#only for G346
noise_12[noise_12>0.6] = np.nan
noise_13[noise_13>0.6] = np.nan
#Plot the histogram
plt.rcParams.update({'font.size':14})
fig,ax = plt.subplots(ncols=1,nrows=1,figsize=(6,5))
sns.histplot(noise_12,\
ax=ax,\
fill=0,\
alpha=0.6,\
log_scale=10,\
#binwidth=0.05,\
element = 'step',\
color = "tab:blue",\
label = r'$^{12}$CO (3-2)',\
legend=False)
sns.histplot(noise_13,\
ax=ax,\
fill=0,\
alpha=0.6,\
#binwidth=0.05,\
log_scale=10,\
element = 'step',\
color = "firebrick",\
label = r'$^{13}$CO (3-2)',\
legend=False)
sns.histplot(noise_SED,\
ax=ax,\
fill=0,\
alpha=0.6,\
#binwidth=0.05,\
log_scale=10,\
element = 'step',\
color = "tab:green",\
label = r'$^{13}$CO (2-1)',\
legend=False)
#G305
#ax.axvline(0.13,linewidth=1.2,linestyle='dotted',color='tab:blue')
#ax.axvline(0.29,linewidth=1.2,linestyle='dotted',color='firebrick')
#G346
ax.axvline(0.19,linewidth=1.2,linestyle='dotted',color='tab:blue')
ax.text(0.19,100,'0.19 K',color='tab:blue',rotation=90)
ax.axvline(0.22,linewidth=1.2,linestyle='dotted',color='firebrick')
ax.text(0.22,100,'0.22 K',color='firebrick',rotation=90)
ax.axvline(0.58,linewidth=1.2,linestyle='dotted',color='tab:green')
ax.text(0.58,100,'0.58 K',color='tab:green',rotation=90)
#ax[0].set_xlim(0.01,3)
#ax[1].set_xlim(0.01,3)
#ax.set_xlim(0.02,3)
#ax[0].set_yscale('log')
#ax[1].set_yscale('log')
#ax[1].set_ylabel(r'Number of pixels')
#ax[0].set_ylabel('')
#ax[0].set_xlabel(r'RMS Noise $^{12}$CO [K]')
#ax[1].set_xlabel(r'RMS Noise $^{13}$CO [K]')
ax.set_xlabel(r'RMS Noise [K]')
ax.set_ylabel(r'Number of pixels')
plt.legend(loc='upper right',frameon=False, labelspacing=0.2, fontsize=12)
plt.tight_layout()
#plt.savefig(filepath+"plots/noise_histogram.pdf")
plt.savefig(filepath+"noise_histogram_G346.pdf")
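# Editor's note: the histograms share a log10 noise axis, and the dotted vertical lines mark
# representative rms values for the three tracers (0.19 K, 0.22 K and 0.58 K); the G305-specific
# limits and markers are left commented out above.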
| 29.12381 | 79 | 0.631785 |
df304545ff21a101acf704dcc71075628e6c719e | 4,646 | py | Python | training.py | aprams/small-data-classification | 5f6c3042fc792d31756168a2c88251ee6de1d7e0 | ["MIT"] | 1 | 2018-07-02T20:20:04.000Z | 2018-07-02T20:20:04.000Z | training.py | aprams/small-data-classification | 5f6c3042fc792d31756168a2c88251ee6de1d7e0 | ["MIT"] | null | null | null | training.py | aprams/small-data-classification | 5f6c3042fc792d31756168a2c88251ee6de1d7e0 | ["MIT"] | null | null | null |
import data_augment
import nail_model
import argparse
import os
import glob
import numpy as np
import keras
import json
import matplotlib.pyplot as plt
import scipy.misc
from keras.preprocessing.image import ImageDataGenerator
from sklearn.model_selection import train_test_split
from keras.applications.mobilenetv2 import preprocess_input
np.random.seed(1337)
# Training & Validation data location
TRAIN_DATA_DIR = "./data/"
# Data folders
MODEL_TMP_SAVE_DIR = "tmp"
MODEL_SAVE_DIR = "model"
MODEL_FILENAME = "model.h5"
CONFIG_FILE_NAME = "config.json"
# Training constants
BATCH_SIZE = 50
LEARNING_RATE = 1e-2
IMAGE_SIZE = 224
EPOCHS = 250
AUGMENT_FACTOR = 40
TEST_SPLIT = 0.005
# Command line arguments
parser = argparse.ArgumentParser()
parser.add_argument("-bs", "--batch-size", default=BATCH_SIZE, type=int)
parser.add_argument("-lr", "--learning-rate", default=LEARNING_RATE, type=float)
parser.add_argument("-ep", "--epochs", default=EPOCHS, type=int,
help="Number of epochs until stopping the training procedure")
parser.add_argument("-is", "--image-size", default=IMAGE_SIZE, type=int,
help="Preprocessed image sizes")
parser.add_argument("-td", "--train-dir", default=TRAIN_DATA_DIR, type=str,
help="Training data directory containing 'good' and 'bad' class folders")
parser.add_argument("-mfd", "--model-save-dir", default=MODEL_SAVE_DIR, type=str,
help="Where to save the final model to")
parser.add_argument("-mfn", "--model-filename", default=MODEL_FILENAME, type=str,
help="final model filename (should end with .h5)")
parser.add_argument("-aug", "--augment-factor", default=AUGMENT_FACTOR, type=int,
help="Iterations of Data augmentations, 0 for no augmentation")
parser.add_argument("-ts", "--test-split", default=TEST_SPLIT, type=float,
help="Split of the data that is used for validation")
args = parser.parse_args()
print(args)
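# Editor's note (illustrative arithmetic): with the default --augment-factor of 40 the original
# training images are kept once and augmented 40 more times, giving an effective training set
# about 41x the size of the training split; --test-split 0.005 holds out only 0.5% for validation.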
DATA_GOOD_DIR = os.path.join(args.train_dir, "good")
DATA_BAD_DIR = os.path.join(args.train_dir, "bad")
if __name__ == "__main__":
tmp_labels = []
tmp_images = []
for image_path in glob.glob(os.path.join(DATA_GOOD_DIR, "*.jpeg")):
tmp_images += [scipy.misc.imresize(plt.imread(image_path, format='jpeg'), (args.image_size, args.image_size))]
tmp_labels += [1]
for image_path in glob.glob(os.path.join(DATA_BAD_DIR, "*.jpeg")):
tmp_images += [scipy.misc.imresize(plt.imread(image_path, format='jpeg'), (args.image_size, args.image_size))]
tmp_labels += [0]
images_train, images_val, labels_train, labels_val = train_test_split(tmp_images, tmp_labels, test_size=args.test_split, random_state=15)
aug_images = []
aug_labels = []
# prefill images and labels with default data
aug_images += images_train
aug_labels += labels_train
for i in range(args.augment_factor):
aug_images += data_augment.seq.augment_images(images_train)
aug_labels.extend(labels_train)
images_train = np.array(aug_images, dtype=np.float32)
labels_train = aug_labels
images_val = np.array(images_val, dtype=np.float32)
if images_train.shape[-1] == 1 or len(images_train.shape) == 3:
images_train = np.stack((images_train,)*3, -1)
images_train = preprocess_input(images_train)
if images_val.shape[-1] == 1 or len(images_val.shape) == 3:
images_val = np.stack((images_val,)*3, -1)
images_val = preprocess_input(images_val)
extractor_model = nail_model.get_extractor_model(args.image_size)
model = keras.Model(inputs=extractor_model.input, outputs=extractor_model.get_layer('global_average_pooling2d_1').output)
bottleneck_features_train = model.predict(images_train)
bottleneck_features_val = model.predict(images_val)
print("Features shape:", bottleneck_features_train.shape)
model, graph = nail_model.get_top_model()
model.compile(loss='binary_crossentropy', optimizer=keras.optimizers.SGD(lr=args.learning_rate, momentum=0.9, decay=1e-6, nesterov=True), metrics=['accuracy'])
model.fit(
bottleneck_features_train,labels_train,
validation_data=(bottleneck_features_val, labels_val),
validation_steps=2,
steps_per_epoch=len(images_train)//args.batch_size+1,
epochs=args.epochs)
if not os.path.exists(args.model_save_dir):
os.mkdir(args.model_save_dir)
model.save_weights(os.path.join(args.model_save_dir, args.model_filename))
with open(os.path.join(args.model_save_dir, CONFIG_FILE_NAME), 'w') as f:
f.write(json.dumps(vars(args)))
| 39.042017 | 163 | 0.716315 |
e3d2395607346936b80fe9dbecc816288a5db6f5 | 13,314 | py | Python | sites/sandbox/settings.py | Idematica/django-oscar | 242a0654210d63ba75f798788916c8b2f7abb7fb | ["BSD-3-Clause"] | null | null | null | sites/sandbox/settings.py | Idematica/django-oscar | 242a0654210d63ba75f798788916c8b2f7abb7fb | ["BSD-3-Clause"] | null | null | null | sites/sandbox/settings.py | Idematica/django-oscar | 242a0654210d63ba75f798788916c8b2f7abb7fb | ["BSD-3-Clause"] | null | null | null |
import os
# Path helper
PROJECT_DIR = os.path.dirname(__file__)
location = lambda x: os.path.join(
os.path.dirname(os.path.realpath(__file__)), x)
USE_TZ = True
DEBUG = True
TEMPLATE_DEBUG = True
SQL_DEBUG = True
SEND_BROKEN_LINK_EMAILS = False
ALLOWED_HOSTS = ['latest.oscarcommerce.com',
'sandbox.oscar.tangentlabs.co.uk',
'master.oscarcommerce.com']
ADMINS = (
('David Winterbottom', 'david.winterbottom@tangentlabs.co.uk'),
)
EMAIL_SUBJECT_PREFIX = '[Oscar sandbox] '
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
MANAGERS = ADMINS
# Use a Sqlite database by default
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(os.path.dirname(__file__), 'db.sqlite'),
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
'ATOMIC_REQUESTS': True
}
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/London'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-gb'
# This should match the locale folders in oscar/locale
LANGUAGES = (
('en-gb', 'English'),
('da', 'Danish'),
('de', 'German'),
('el', 'Greek'),
('en', 'English'),
('es', 'Spanish'),
('fr', 'French'),
('it', 'Italian'),
('ja', 'Japanese'),
('pl', 'Polish'),
('pt', 'Portugese'),
('ru', 'Russian'),
('sk', 'Slovakian'),
)
ROSETTA_STORAGE_CLASS = 'rosetta.storage.SessionRosettaStorage'
ROSETTA_ENABLE_TRANSLATION_SUGGESTIONS = True
ROSETTA_REQUIRES_AUTH = False
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = location("public/media")
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
#ADMIN_MEDIA_PREFIX = '/media/admin/'
STATIC_URL = '/static/'
STATIC_ROOT = location('public/static')
STATICFILES_DIRS = (
location('static/'),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '$)a7n&o80u!6y5t-+jrd3)3!%vh&shg$wqpjpxc!ar&p#!)n1a'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# needed by django-treebeard for admin (and potentially other libs)
'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.request",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.contrib.messages.context_processors.messages",
# Oscar specific
'oscar.apps.search.context_processors.search_form',
'oscar.apps.promotions.context_processors.promotions',
'oscar.apps.checkout.context_processors.checkout',
'oscar.core.context_processors.metadata',
'oscar.apps.customer.notifications.context_processors.notifications',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.transaction.TransactionMiddleware',
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
# Allow languages to be selected
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
# Ensure a valid basket is added to the request instance for every request
'oscar.apps.basket.middleware.BasketMiddleware',
# Enable the ProfileMiddleware, then add ?cprofile to any
# URL path to print out profile details
#'oscar.profiling.middleware.ProfileMiddleware',
)
ROOT_URLCONF = 'urls'
# Add another path to Oscar's templates. This allows templates to be
# customised easily.
from oscar import OSCAR_MAIN_TEMPLATE_DIR
TEMPLATE_DIRS = (
location('templates'),
OSCAR_MAIN_TEMPLATE_DIR,
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(message)s',
},
'simple': {
'format': '[%(asctime)s] %(message)s'
},
},
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'django.utils.log.NullHandler',
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
},
'checkout_file': {
'level': 'INFO',
'class': 'oscar.core.logging.handlers.EnvFileHandler',
'filename': 'checkout.log',
'formatter': 'verbose'
},
'gateway_file': {
'level': 'INFO',
'class': 'oscar.core.logging.handlers.EnvFileHandler',
'filename': 'gateway.log',
'formatter': 'simple'
},
'error_file': {
'level': 'INFO',
'class': 'oscar.core.logging.handlers.EnvFileHandler',
'filename': 'errors.log',
'formatter': 'verbose'
},
'sorl_file': {
'level': 'INFO',
'class': 'oscar.core.logging.handlers.EnvFileHandler',
'filename': 'sorl.log',
'formatter': 'verbose'
},
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler',
'filters': ['require_debug_false'],
},
},
'loggers': {
# Django loggers
'django': {
'handlers': ['null'],
'propagate': True,
'level': 'INFO',
},
'django.request': {
'handlers': ['mail_admins', 'error_file'],
'level': 'ERROR',
'propagate': False,
},
'django.db.backends': {
'handlers': ['null'],
'propagate': False,
'level': 'DEBUG',
},
# Oscar core loggers
'oscar.checkout': {
'handlers': ['console', 'checkout_file'],
'propagate': False,
'level': 'INFO',
},
'oscar.catalogue.import': {
'handlers': ['console'],
'propagate': False,
'level': 'INFO',
},
'oscar.alerts': {
'handlers': ['null'],
'propagate': False,
'level': 'INFO',
},
# Sandbox logging
'gateway': {
'handlers': ['gateway_file'],
'propagate': True,
'level': 'INFO',
},
# Third party
'south': {
'handlers': ['null'],
'propagate': True,
'level': 'INFO',
},
'sorl.thumbnail': {
'handlers': ['sorl_file'],
'propagate': True,
'level': 'INFO',
},
# Suppress output of this debug toolbar panel
'template_timings_panel': {
'handlers': ['null'],
'level': 'DEBUG',
'propagate': False,
}
}
}
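# Illustrative sketch (not part of the original settings): application code picks
# up one of the loggers configured above by name, e.g.
#
#     import logging
#     logger = logging.getLogger('gateway')
#     logger.info('Dashboard access granted to %s', email)
#
# Records sent to 'gateway' are written to gateway.log by the EnvFileHandler above.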
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
'django.contrib.flatpages',
'django.contrib.staticfiles',
'django_extensions',
# Debug toolbar + extensions
'debug_toolbar',
'template_timings_panel',
'south',
'rosetta', # For i18n testing
'compressor',
'apps.gateway', # For allowing dashboard access
]
from oscar import get_core_apps
INSTALLED_APPS = INSTALLED_APPS + get_core_apps()
# Add Oscar's custom auth backend so users can sign in using their email
# address.
AUTHENTICATION_BACKENDS = (
'oscar.apps.customer.auth_backends.Emailbackend',
'django.contrib.auth.backends.ModelBackend',
)
LOGIN_REDIRECT_URL = '/'
APPEND_SLASH = True
# Haystack settings
HAYSTACK_CONNECTIONS = {
'default': {
'ENGINE': 'haystack.backends.whoosh_backend.WhooshEngine',
'PATH': os.path.join(os.path.dirname(__file__), 'whoosh_index'),
},
}
# =============
# Debug Toolbar
# =============
DEBUG_TOOLBAR_PANELS = [
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'template_timings_panel.panels.TemplateTimings.TemplateTimings',
'debug_toolbar.panels.cache.CachePanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]
# ==============
# Oscar settings
# ==============
from oscar.defaults import *
# Meta
# ====
OSCAR_SHOP_TAGLINE = 'Sandbox'
# Enter Google Analytics ID for the tracking to be included in the templates
GOOGLE_ANALYTICS_ID = 'UA-45363517-3'
OSCAR_RECENTLY_VIEWED_PRODUCTS = 20
OSCAR_ALLOW_ANON_CHECKOUT = True
# This is added to each template context by the core context processor. It is
# useful for test/stage/qa sites where you want to show the version of the site
# in the page title.
DISPLAY_VERSION = False
# Order processing
# ================
# Some sample order/line status settings
OSCAR_INITIAL_ORDER_STATUS = 'Pending'
OSCAR_INITIAL_LINE_STATUS = 'Pending'
OSCAR_ORDER_STATUS_PIPELINE = {
'Pending': ('Being processed', 'Cancelled',),
'Being processed': ('Processed', 'Cancelled',),
'Cancelled': (),
'Processed': (),
}
# LESS/CSS/statics
# ================
# We default to using CSS files, rather than the LESS files that generate them.
# If you want to develop Oscar's CSS, then set USE_LESS=True and
# COMPRESS_ENABLED=False in your settings_local module and ensure you have
# 'lessc' installed. You can do this by running:
#
# pip install -r requirements_less.txt
#
# which will install node.js and less in your virtualenv.
USE_LESS = False
COMPRESS_ENABLED = True
COMPRESS_PRECOMPILERS = (
('text/less', 'lessc {infile} {outfile}'),
)
COMPRESS_OFFLINE_CONTEXT = {
'STATIC_URL': 'STATIC_URL',
'use_less': USE_LESS,
}
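# Illustrative sketch (assumption, not shipped with the sandbox): a minimal
# settings_local.py for working on the LESS sources, as described above:
#
#     # settings_local.py
#     USE_LESS = True
#     COMPRESS_ENABLED = False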
# We do this to work around an issue in compressor where the LESS files are
# compiled but compression isn't enabled. When this happens, the relative URL
# is wrong between the generated CSS file and other assets:
# https://github.com/jezdez/django_compressor/issues/226
COMPRESS_OUTPUT_DIR = 'oscar'
# Logging
# =======
LOG_ROOT = location('logs')
# Ensure log root exists
if not os.path.exists(LOG_ROOT):
os.mkdir(LOG_ROOT)
# Sorl
# ====
THUMBNAIL_DEBUG = True
THUMBNAIL_KEY_PREFIX = 'oscar-sandbox'
# Use a custom KV store to handle integrity error
THUMBNAIL_KVSTORE = 'oscar.sorl_kvstore.ConcurrentKVStore'
# Django 1.6 has switched to JSON serializing for security reasons, but it does not
# serialize Models. We should resolve this by extending the
# django/core/serializers/json.Serializer to have the `dumps` function. Also
# in tests/config.py
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
# Try and import local settings which can be used to override any of the above.
try:
from settings_local import *
except ImportError:
pass
| 30.054176
| 83
| 0.648941
|
1c2d1fc8a2b5942f018d1861c44ee51d2467d88e
| 1,886
|
py
|
Python
|
src/extended_webdrivers/frame.py
|
dillonm197/extended-webdrivers
|
9cb4cdb75f37c66ee1ac7fa13b947ae3bcb17863
|
[
"MIT"
] | null | null | null |
src/extended_webdrivers/frame.py
|
dillonm197/extended-webdrivers
|
9cb4cdb75f37c66ee1ac7fa13b947ae3bcb17863
|
[
"MIT"
] | null | null | null |
src/extended_webdrivers/frame.py
|
dillonm197/extended-webdrivers
|
9cb4cdb75f37c66ee1ac7fa13b947ae3bcb17863
|
[
"MIT"
] | 1
|
2019-08-07T01:48:36.000Z
|
2019-08-07T01:48:36.000Z
|
class Frame:
""" Base class for handling switching to and from iframes using context managers. """
def __init__(self, child_frame):
self.driver = child_frame.parent
assert child_frame.tag_name.lower() == 'iframe'
self.child_frame = child_frame
def _switch_to(self):
""" Switches to the specified frame. """
# Store the parent window and frame to access when we leave the child frame.
self.parent_window = self.driver.current_window_handle
self.parent_frame = self.driver.frame
# Switch to the child frame.
self.driver.switch_to.frame(self.child_frame)
self.driver.angular = self.driver._test_angular()
self.driver.jquery = self.driver._test_jquery()
if self.driver.sync_angular:
self.driver.wait_for_angular()
if self.driver.sync_jquery:
self.driver.wait_for_jquery(self.driver._script_timeout)
def __enter__(self):
self._switch_to()
return self
def _switch_from(self):
""" Switches to the previous frame. """
# Switch to the default window and frame.
self.driver.switch_to.default_content()
# Switch to the parent window.
if self.driver.current_window_handle != self.parent_window:
self.driver.switch_to.window(self.parent_window)
# Switch to parent frame if it exists.
if self.parent_frame is not None:
self.driver.switch_to.frame(self.parent_frame)
self.driver.angular = self.driver._test_angular()
self.driver.jquery = self.driver._test_jquery()
if self.driver.sync_angular:
self.driver.wait_for_angular()
if self.driver.sync_jquery:
self.driver.wait_for_jquery(self.driver._script_timeout)
def __exit__(self, exc_type, exc_val, exc_tb):
self._switch_from()
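# Illustrative sketch (not part of the original module): Frame is meant to be used
# as a context manager around work inside an iframe. `driver` is assumed to be one
# of this package's extended webdrivers; the locators below are placeholders.
def _example_frame_usage(driver):
    iframe = driver.find_element_by_tag_name('iframe')
    with Frame(iframe):
        # Inside the block the driver has been switched into the child iframe.
        driver.find_element_by_id('submit').click()
    # On exit the driver is switched back to the parent window/frame.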
| 35.584906
| 89
| 0.662248
|
5e7ce3dc184cb55630a63d0518ec34c899e72db3
| 765
|
py
|
Python
|
training_data/parse_data.py
|
Occupyig-Mars/alter-nlu
|
d689b0a589216e85f59a0073f8415c5d22b679f8
|
[
"Apache-2.0"
] | 21
|
2019-02-25T21:47:15.000Z
|
2019-12-26T08:59:44.000Z
|
training_data/parse_data.py
|
Occupyig-Mars/alter-nlu
|
d689b0a589216e85f59a0073f8415c5d22b679f8
|
[
"Apache-2.0"
] | null | null | null |
training_data/parse_data.py
|
Occupyig-Mars/alter-nlu
|
d689b0a589216e85f59a0073f8415c5d22b679f8
|
[
"Apache-2.0"
] | 9
|
2020-02-26T06:31:31.000Z
|
2022-01-24T10:22:35.000Z
|
import pandas as pd
# get structured data
def get_data(val):
botName = val['botName']
entity = pd.io.json.json_normalize(val['entity_data'], record_path='data', meta='name')
train = pd.io.json.json_normalize(val['intent_data']).drop(['entities'], axis=1).drop_duplicates()
intent_entity = list(set(pd.io.json.json_normalize(val['intent_data'], record_path='entities', meta='intent')['intent'].tolist()))
print('> Training Bot : ' + botName)
print("\tTotal training examples : {}\n\tIntents : {}".format(train.shape[0], len(train['intent'].unique())))
if entity.empty:
print("\tEntities : Not Added.")
else:
print("\tEntities : {}".format(len(entity['name'].unique())))
return entity, train, intent_entity
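# Illustrative sketch (assumption): a minimal payload with the keys that get_data()
# reads; the field names inside the records are invented for demonstration only.
def _example_get_data():
    sample = {
        'botName': 'demo-bot',
        'entity_data': [
            {'name': 'city', 'data': [{'value': 'paris', 'synonyms': ['paris']}]},
        ],
        'intent_data': [
            {'intent': 'book_flight', 'text': 'fly me to paris',
             'entities': [{'name': 'city', 'value': 'paris'}]},
        ],
    }
    return get_data(sample)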
| 40.263158
| 134
| 0.657516
|
d3ba290d1e91e7982a52c613557280665ee81de3
| 1,567
|
py
|
Python
|
examples/split_data_for_unbiased_estimation.py
|
PGBI/Surprise
|
76e47037675afc6c0fb017490a88d1b2b2dff0f7
|
[
"BSD-3-Clause"
] | 5,572
|
2016-11-24T08:21:53.000Z
|
2022-03-31T20:35:00.000Z
|
examples/split_data_for_unbiased_estimation.py
|
daihui-lu/Surprise
|
46b9914995e6c8c7d227b46f2eaeef2d4600580f
|
[
"BSD-3-Clause"
] | 393
|
2016-11-22T12:48:00.000Z
|
2022-03-26T15:09:53.000Z
|
examples/split_data_for_unbiased_estimation.py
|
daihui-lu/Surprise
|
46b9914995e6c8c7d227b46f2eaeef2d4600580f
|
[
"BSD-3-Clause"
] | 1,096
|
2016-12-08T22:01:57.000Z
|
2022-03-29T03:55:54.000Z
|
"""
This module describes how to split a dataset into two parts A and B: A is for
tuning the algorithm parameters, and B is for obtaining an unbiased estimate of
its performance. The tuning is done by Grid Search.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import random
from surprise import SVD
from surprise import Dataset
from surprise import accuracy
from surprise.model_selection import GridSearchCV
# Load the full dataset.
data = Dataset.load_builtin('ml-100k')
raw_ratings = data.raw_ratings
# shuffle ratings if you want
random.shuffle(raw_ratings)
# A = 90% of the data, B = 10% of the data
threshold = int(.9 * len(raw_ratings))
A_raw_ratings = raw_ratings[:threshold]
B_raw_ratings = raw_ratings[threshold:]
data.raw_ratings = A_raw_ratings # data is now the set A
# Select your best algo with grid search.
print('Grid Search...')
param_grid = {'n_epochs': [5, 10], 'lr_all': [0.002, 0.005]}
grid_search = GridSearchCV(SVD, param_grid, measures=['rmse'], cv=3)
grid_search.fit(data)
algo = grid_search.best_estimator['rmse']
# retrain on the whole set A
trainset = data.build_full_trainset()
algo.fit(trainset)
# Compute biased accuracy on A
predictions = algo.test(trainset.build_testset())
print('Biased accuracy on A,', end=' ')
accuracy.rmse(predictions)
# Compute unbiased accuracy on B
testset = data.construct_testset(B_raw_ratings) # testset is now the set B
predictions = algo.test(testset)
print('Unbiased accuracy on B,', end=' ')
accuracy.rmse(predictions)
| 29.018519
| 78
| 0.749202
|
6c295380242cd79dd3f66866d3d86b80e81f20cf
| 2,523
|
py
|
Python
|
tests/integration/enforcementlevel/policy-pack-python/__main__.py
|
rsclarke-vgw/pulumi-policy
|
053a5d3608432ff945bafbd7c8d5a8ffde0d3498
|
[
"Apache-2.0"
] | 19
|
2019-11-23T16:12:20.000Z
|
2022-01-19T19:34:57.000Z
|
tests/integration/enforcementlevel/policy-pack-python/__main__.py
|
rsclarke-vgw/pulumi-policy
|
053a5d3608432ff945bafbd7c8d5a8ffde0d3498
|
[
"Apache-2.0"
] | 160
|
2019-08-21T21:40:01.000Z
|
2022-02-13T10:52:35.000Z
|
tests/integration/enforcementlevel/policy-pack-python/__main__.py
|
rsclarke-vgw/pulumi-policy
|
053a5d3608432ff945bafbd7c8d5a8ffde0d3498
|
[
"Apache-2.0"
] | 5
|
2019-09-03T17:14:06.000Z
|
2021-07-13T04:25:05.000Z
|
# Copyright 2016-2020, Pulumi Corporation. All rights reserved.
from typing import List, NamedTuple, Optional
from pulumi import Config
from pulumi_policy import (
EnforcementLevel,
PolicyPack,
ResourceValidationPolicy,
StackValidationPolicy,
)
class Scenario(NamedTuple):
pack: Optional[EnforcementLevel]
policy: Optional[EnforcementLevel]
# Build a set of scenarios to test
enforcement_levels = [EnforcementLevel.ADVISORY, EnforcementLevel.DISABLED, EnforcementLevel.MANDATORY, None]
# Index 0 is an unused placeholder so that the scenario numbers supplied via config start at 1.
scenarios: List[Scenario] = [{}]
for pack in enforcement_levels:
for policy in enforcement_levels:
scenarios.append(Scenario(pack, policy))
# Get the current scenario
config = Config()
test_scenario = config.require_int("scenario")
if test_scenario >= len(scenarios):
raise AssertionError(f"Unexpected test_scenario {test_scenario}.")
scenario = scenarios[test_scenario]
# Generate a Policy Pack name for the scenario.
pack: str = scenario.pack.value if scenario.pack is not None else "none"
policy: str = f"-{scenario.policy.value}" if scenario.policy is not None else ""
policy_pack_name = f"enforcementlevel-{pack}{policy}-test-policy"
# Whether the validate function should raise an exception (to validate that it doesn't run).
validate_function_raises = (
(scenario.pack == EnforcementLevel.DISABLED and
(scenario.policy == EnforcementLevel.DISABLED or scenario.policy is None)) or
scenario.policy == EnforcementLevel.DISABLED)
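# Worked example (illustrative, assuming the enum's string values are the lowercase
# names): Scenario(pack=EnforcementLevel.ADVISORY, policy=None) yields the name
# "enforcementlevel-advisory-test-policy" above and validate_function_raises == False.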
# Create a Policy Pack instance for the scenario.
def validate_resource(args, report_violation):
if validate_function_raises:
raise AssertionError("validate-resource should never be called.")
report_violation("validate-resource-violation-message")
def validate_stack(args, report_violation):
if validate_function_raises:
raise AssertionError("validate-stack should never be called.")
report_violation("validate-stack-violation-message")
PolicyPack(
name=policy_pack_name,
enforcement_level=scenario.pack,
policies=[
ResourceValidationPolicy(
name="validate-resource",
description="Always reports a resource violation.",
enforcement_level=scenario.policy,
validate=validate_resource,
),
StackValidationPolicy(
name="validate-stack",
description="Always reports a stack violation.",
enforcement_level=scenario.policy,
validate=validate_stack,
),
],
)
| 30.39759
| 109
| 0.731272
|
ab9623ddf8cd6b9cfd10039b659a558a409067ba
| 2,635
|
py
|
Python
|
tests/extmod/vfs_fat_fileio2.py
|
MakeItZone/circuitpython
|
7f803c0b51c333210ed267502422ed7bb28b9be7
|
[
"Unlicense",
"BSD-3-Clause",
"MIT-0",
"MIT"
] | 36
|
2017-05-20T21:11:49.000Z
|
2022-01-30T18:36:45.000Z
|
tests/extmod/vfs_fat_fileio2.py
|
MakeItZone/circuitpython
|
7f803c0b51c333210ed267502422ed7bb28b9be7
|
[
"Unlicense",
"BSD-3-Clause",
"MIT-0",
"MIT"
] | 6
|
2018-06-01T15:12:52.000Z
|
2022-01-11T00:10:28.000Z
|
tests/extmod/vfs_fat_fileio2.py
|
MakeItZone/circuitpython
|
7f803c0b51c333210ed267502422ed7bb28b9be7
|
[
"Unlicense",
"BSD-3-Clause",
"MIT-0",
"MIT"
] | 6
|
2021-05-12T20:45:46.000Z
|
2021-12-27T04:51:56.000Z
|
try:
import uerrno
import uos
except ImportError:
print("SKIP")
raise SystemExit
try:
uos.VfsFat
except AttributeError:
print("SKIP")
raise SystemExit
class RAMFS:
SEC_SIZE = 512
def __init__(self, blocks):
self.data = bytearray(blocks * self.SEC_SIZE)
def readblocks(self, n, buf):
#print("readblocks(%s, %x(%d))" % (n, id(buf), len(buf)))
for i in range(len(buf)):
buf[i] = self.data[n * self.SEC_SIZE + i]
return 0
def writeblocks(self, n, buf):
#print("writeblocks(%s, %x)" % (n, id(buf)))
for i in range(len(buf)):
self.data[n * self.SEC_SIZE + i] = buf[i]
return 0
def ioctl(self, op, arg):
#print("ioctl(%d, %r)" % (op, arg))
if op == 4: # BP_IOCTL_SEC_COUNT
return len(self.data) // self.SEC_SIZE
if op == 5: # BP_IOCTL_SEC_SIZE
return self.SEC_SIZE
try:
bdev = RAMFS(50)
except MemoryError:
print("SKIP")
raise SystemExit
uos.VfsFat.mkfs(bdev)
vfs = uos.VfsFat(bdev)
uos.mount(vfs, '/ramdisk')
uos.chdir('/ramdisk')
try:
vfs.mkdir("foo_dir")
except OSError as e:
print(e.args[0] == uerrno.EEXIST)
try:
vfs.remove("foo_dir")
except OSError as e:
print(e.args[0] == uerrno.EISDIR)
try:
vfs.remove("no_file.txt")
except OSError as e:
print(e.args[0] == uerrno.ENOENT)
try:
vfs.rename("foo_dir", "/null/file")
except OSError as e:
print(e.args[0] == uerrno.ENOENT)
# file in dir
with open("foo_dir/file-in-dir.txt", "w+t") as f:
f.write("data in file")
with open("foo_dir/file-in-dir.txt", "r+b") as f:
print(f.read())
with open("foo_dir/sub_file.txt", "w") as f:
f.write("subdir file")
# directory not empty
try:
vfs.rmdir("foo_dir")
except OSError as e:
print(e.args[0] == uerrno.EACCES)
# trim full path
vfs.rename("foo_dir/file-in-dir.txt", "foo_dir/file.txt")
print(list(vfs.ilistdir("foo_dir")))
vfs.rename("foo_dir/file.txt", "moved-to-root.txt")
print(list(vfs.ilistdir()))
# check that renaming to existing file will overwrite it
with open("temp", "w") as f:
f.write("new text")
vfs.rename("temp", "moved-to-root.txt")
print(list(vfs.ilistdir()))
with open("moved-to-root.txt") as f:
print(f.read())
# valid removes
vfs.remove("foo_dir/sub_file.txt")
vfs.rmdir("foo_dir")
print(list(vfs.ilistdir()))
# disk full
try:
bsize = vfs.statvfs("/ramdisk")[0]
free = vfs.statvfs("/ramdisk")[2] + 1
f = open("large_file.txt", "wb")
f.write(bytearray(bsize * free))
except OSError as e:
print("ENOSPC:", e.args[0] == 28) # uerrno.ENOSPC
| 22.521368
| 65
| 0.614801
|
7b8c944f1f73294a1464583e8d1285afbc1dbaac
| 4,043
|
py
|
Python
|
mypy/scope.py
|
Affirm/mypy
|
bc0b551d8df2baadc44f0c3b0b801fcc12119658
|
[
"PSF-2.0"
] | 4
|
2018-10-19T04:36:20.000Z
|
2020-02-13T16:14:09.000Z
|
mypy/scope.py
|
Affirm/mypy
|
bc0b551d8df2baadc44f0c3b0b801fcc12119658
|
[
"PSF-2.0"
] | null | null | null |
mypy/scope.py
|
Affirm/mypy
|
bc0b551d8df2baadc44f0c3b0b801fcc12119658
|
[
"PSF-2.0"
] | 1
|
2019-02-13T04:45:01.000Z
|
2019-02-13T04:45:01.000Z
|
"""Track current scope to easily calculate the corresponding fine-grained target.
TODO: Use everywhere where we track targets, including in mypy.errors.
"""
from contextlib import contextmanager
from typing import List, Optional, Iterator, Tuple
from mypy.nodes import TypeInfo, FuncBase
class Scope:
"""Track which target we are processing at any given time."""
SavedScope = Tuple[str, Optional[TypeInfo], Optional[FuncBase]]
def __init__(self) -> None:
self.module = None # type: Optional[str]
self.classes = [] # type: List[TypeInfo]
self.function = None # type: Optional[FuncBase]
# Number of nested scopes ignored (that don't get their own separate targets)
self.ignored = 0
def current_module_id(self) -> str:
assert self.module
return self.module
def current_target(self) -> str:
"""Return the current target (non-class; for a class return enclosing module)."""
assert self.module
if self.function:
return self.function.fullname()
return self.module
def current_full_target(self) -> str:
"""Return the current target (may be a class)."""
assert self.module
if self.function:
return self.function.fullname()
if self.classes:
return self.classes[-1].fullname()
return self.module
def current_type_name(self) -> Optional[str]:
"""Return the current type's short name if it exists"""
return self.classes[-1].name() if self.classes else None
def current_function_name(self) -> Optional[str]:
"""Return the current function's short name if it exists"""
return self.function.name() if self.function else None
def enter_file(self, prefix: str) -> None:
self.module = prefix
self.classes = []
self.function = None
self.ignored = 0
def enter_function(self, fdef: FuncBase) -> None:
if not self.function:
self.function = fdef
else:
# Nested functions are part of the topmost function target.
self.ignored += 1
def enter_class(self, info: TypeInfo) -> None:
"""Enter a class target scope."""
if not self.function:
self.classes.append(info)
else:
# Classes within functions are part of the enclosing function target.
self.ignored += 1
def leave(self) -> None:
"""Leave the innermost scope (can be any kind of scope)."""
if self.ignored:
# Leave a scope that's included in the enclosing target.
self.ignored -= 1
elif self.function:
# Function is always the innermost target.
self.function = None
elif self.classes:
# Leave the innermost class.
self.classes.pop()
else:
# Leave module.
assert self.module
self.module = None
def save(self) -> SavedScope:
"""Produce a saved scope that can be entered with saved_scope()"""
assert self.module
# We only save the innermost class, which is sufficient since
# the rest are only needed for when classes are left.
cls = self.classes[-1] if self.classes else None
return (self.module, cls, self.function)
@contextmanager
def function_scope(self, fdef: FuncBase) -> Iterator[None]:
self.enter_function(fdef)
yield
self.leave()
@contextmanager
def class_scope(self, info: TypeInfo) -> Iterator[None]:
self.enter_class(info)
yield
self.leave()
@contextmanager
def saved_scope(self, saved: SavedScope) -> Iterator[None]:
module, info, function = saved
self.enter_file(module)
if info:
self.enter_class(info)
if function:
self.enter_function(function)
yield
if function:
self.leave()
if info:
self.leave()
self.leave()
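# Illustrative sketch (not part of mypy): how the context managers above compose to
# produce fully qualified targets. `info` and `fdef` stand for a TypeInfo and a
# FuncBase obtained from a parsed module.
def _example_scope_usage(info: TypeInfo, fdef: FuncBase) -> None:
    scope = Scope()
    scope.enter_file('pkg.mod')
    with scope.class_scope(info):
        with scope.function_scope(fdef):
            target = scope.current_full_target()  # e.g. 'pkg.mod.C.method'
    # A scope can be captured and re-entered later:
    saved = scope.save()
    with scope.saved_scope(saved):
        pass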
| 32.869919
| 89
| 0.610438
|
e17aef8c65d113c1e58dfc72460ad4322a264b34
| 4,324
|
py
|
Python
|
WebRequest/Handlers.py
|
ImportTaste/WebRequest
|
0cc385622624de16ec980e0c12d9080d593cab74
|
[
"WTFPL"
] | 8
|
2018-06-04T09:34:28.000Z
|
2021-09-16T15:21:24.000Z
|
WebRequest/Handlers.py
|
ImportTaste/WebRequest
|
0cc385622624de16ec980e0c12d9080d593cab74
|
[
"WTFPL"
] | 4
|
2018-03-03T07:45:27.000Z
|
2019-12-26T20:38:18.000Z
|
WebRequest/Handlers.py
|
ImportTaste/WebRequest
|
0cc385622624de16ec980e0c12d9080d593cab74
|
[
"WTFPL"
] | 1
|
2019-12-26T20:36:32.000Z
|
2019-12-26T20:36:32.000Z
|
#!/usr/bin/python3
import sys
import codecs
import http.client
import email.parser
import urllib.request
import urllib.parse
import urllib.error
import os.path
import time
import http.cookiejar
import traceback
import logging
import zlib
import bs4
import re
import string
import gzip
import io
import socket
import json
import base64
import random
class HeadRequest(urllib.request.Request):
def get_method(self):
		# Apparently some servers now block HEAD requests, so fall back to a plain GET.
return "GET"
# return "HEAD"
class HTTPRedirectBlockerErrorHandler(urllib.request.HTTPErrorProcessor): # pragma: no cover
def http_response(self, request, response):
code, msg, hdrs = response.code, response.msg, response.info()
# only add this line to stop 302 redirection.
if code == 302:
print("Code!", 302)
return response
if code == 301:
print("Code!", 301)
return response
print("[HTTPRedirectBlockerErrorHandler] http_response! code:", code)
print(hdrs)
print(msg)
if not (200 <= code < 300):
response = self.parent.error('http', request, response, code, msg, hdrs)
return response
https_response = http_response
# Custom redirect handler to work around
# issue https://bugs.python.org/issue17214
class HTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
# Implementation note: To avoid the server sending us into an
# infinite loop, the request object needs to track what URLs we
# have already seen. Do this by adding a handler-specific
# attribute to the Request object.
def http_error_302(self, req, fp, code, msg, headers):
# Some servers (incorrectly) return multiple Location headers
# (so probably same goes for URI). Use first header.
if "location" in headers:
newurl = headers["location"]
elif "uri" in headers:
newurl = headers["uri"]
else:
return
# fix a possible malformed URL
urlparts = urllib.parse.urlparse(newurl)
# For security reasons we don't allow redirection to anything other
# than http, https or ftp.
if urlparts.scheme not in ('http', 'https', 'ftp', ''):
raise urllib.error.HTTPError(
newurl, code,
"%s - Redirection to url '%s' is not allowed" % (msg, newurl),
headers, fp)
if not urlparts.path:
urlparts = list(urlparts)
urlparts[2] = "/"
newurl = urllib.parse.urlunparse(urlparts)
# http.client.parse_headers() decodes as ISO-8859-1. Recover the
# original bytes and percent-encode non-ASCII bytes, and any special
# characters such as the space.
newurl = urllib.parse.quote(
newurl, encoding="iso-8859-1", safe=string.punctuation)
newurl = urllib.parse.urljoin(req.full_url, newurl)
# XXX Probably want to forget about the state of the current
# request, although that might interact poorly with other
# handlers that also use handler-specific request attributes
new = self.redirect_request(req, fp, code, msg, headers, newurl)
if new is None: # pragma: no cover
return
# loop detection
# .redirect_dict has a key url if url was previously visited.
if hasattr(req, 'redirect_dict'):
visited = new.redirect_dict = req.redirect_dict
if (visited.get(newurl, 0) >= self.max_repeats or
len(visited) >= self.max_redirections):
raise urllib.error.HTTPError(req.full_url, code,
self.inf_msg + msg, headers, fp)
else:
visited = new.redirect_dict = req.redirect_dict = {}
visited[newurl] = visited.get(newurl, 0) + 1
# Don't close the fp until we are sure that we won't use it
# with HTTPError.
fp.read()
fp.close()
return self.parent.open(new, timeout=req.timeout)
class PreemptiveBasicAuthHandler(urllib.request.HTTPBasicAuthHandler):
'''Preemptive basic auth.
	Instead of waiting for a 401 challenge and then retrying with the credentials,
send the credentials if the url is handled by the password manager.
Note: please use realm=None when calling add_password.'''
def http_request(self, req):
url = req.get_full_url()
realm = None
# this is very similar to the code from retry_http_basic_auth()
# but returns a request object.
user, pw = self.passwd.find_user_password(realm, url)
if pw:
raw = "%s:%s" % (user, pw)
raw = raw.encode("ascii")
auth = b'Basic ' + base64.standard_b64encode(raw).strip()
req.add_unredirected_header(self.auth_header, auth)
return req
https_request = http_request
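# Illustrative sketch (assumption, not part of the original module): wiring the
# handlers above into an opener. The URL and credentials are placeholders.
def _example_build_opener():
	auth_handler = PreemptiveBasicAuthHandler()
	# realm must be None so that http_request() above can find the credentials.
	auth_handler.add_password(None, "http://example.com/", "user", "secret")
	return urllib.request.build_opener(HTTPRedirectHandler(), auth_handler)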
| 29.216216
| 95
| 0.725717
|
e4700e15c8c8bb413ed08d0468170cae2f3db626
| 5,135
|
py
|
Python
|
main.py
|
ddeeddii/mod-diagnosis-tool
|
652ec02ebd395387504dd06f8c86ca3a1ef57cdf
|
[
"MIT"
] | null | null | null |
main.py
|
ddeeddii/mod-diagnosis-tool
|
652ec02ebd395387504dd06f8c86ca3a1ef57cdf
|
[
"MIT"
] | null | null | null |
main.py
|
ddeeddii/mod-diagnosis-tool
|
652ec02ebd395387504dd06f8c86ca3a1ef57cdf
|
[
"MIT"
] | null | null | null |
import tkinter as tk
import tkinter.filedialog as fd
from xml.dom import minidom
import os
from math import floor
from datetime import datetime
# https://stackoverflow.com/questions/287871/how-to-print-colored-text-to-the-terminal
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKCYAN = '\033[96m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def colorPrint(text, color):
text = f"{color}{text}{bcolors.ENDC}"
print(text)
def cls():
os.system('cls' if os.name=='nt' else 'clear')
def checkItempools(path):
_, mTail = os.path.split(path)
content = f"{path}/content"
if not os.path.isdir(content):
return
itemsPath = f"{content}/items.xml"
itempoolsPath = f"{content}/itempools.xml"
if not os.path.isfile(itemsPath) or not os.path.isfile(itempoolsPath):
return
# Parse items.xml and get all item names
try:
parsedItems = minidom.parse(itemsPath)
except Exception as e:
print(f"Unable to parse items.xml from mod {mTail}.\nManual checking of itempools.xml and items.xml parity required!\n")
return
itemslist = []
tagsToFind = ["active", "passive", "familiar"]
for tag in tagsToFind:
itemslist += parsedItems.getElementsByTagName(tag)
itemsxmlNames = []
for s in itemslist:
name = str(s.attributes["name"].value)
itemsxmlNames.append(name)
# Parse itempools.xml and get all item names
try:
parsedItempools = minidom.parse(itempoolsPath)
except Exception as e:
print(f"Unable to parse itempools.xml from mod {mTail}.\nManual checking of itempools.xml and items.xml parity required!")
return
itempoolsList = parsedItempools.getElementsByTagName("Item")
# Get all item names from itempools.xml and compare them to items.xml
erroredItems = []
for s in itempoolsList:
name = str(s.attributes["Name"].value)
if not (name in itemsxmlNames):
if not (name in erroredItems):
erroredItems.append(name)
return erroredItems
def checkDates(path):
patchDayTime = 1621093333 # may 15th, 2021 - patch where mods were enabled for rep
timeModified = floor(os.path.getmtime(path))
if timeModified < patchDayTime:
return timeModified
def checkResources(path):
resources = f"{path}/resources"
if not os.path.isdir(resources):
return
erroredFiles = []
with os.scandir(resources) as resourcesFolder:
for file in resourcesFolder:
_, extension = os.path.splitext(file.path)
if extension == ".xml":
erroredFiles.append(file.name)
return erroredFiles
def main():
# Ask user to select the mods folder
print("Select the 'mods' directory where Isaac is installed:\n")
root = tk.Tk()
root.withdraw()
modsPath = fd.askdirectory(title="Select the 'mods' directory where Isaac is installed")
_, modsTail = os.path.split(modsPath)
if modsTail != "mods":
input("Path is incorrect! Make sure you selected the 'mods' folder in the place where Isaac is installed (where 'isaac-ng.exe' is)")
os._exit(0)
problems = []
with os.scandir(modsPath) as modsMain:
for rootEntry in modsMain:
_, modTail = os.path.split(rootEntry.path)
# Itempools checking
erroredItems = checkItempools(rootEntry.path)
if erroredItems:
for item in erroredItems:
problems.append(["ERROR", modTail, f"Item named '{item}' is present in mod's itempools.xml but not in items.xml!"])
# XMLs in /resources/
erroredFiles = checkResources(rootEntry.path)
if erroredFiles:
for file in erroredFiles:
if file == "items.xml":
problems.append(["ERROR", modTail, f"File named '{file}' is present in mod's /resources/ folder!"])
else:
problems.append(["WARN ", modTail, f"File named '{file}' is present in mod's /resources/ folder!"])
# Edit times
oldFile = checkDates(rootEntry.path)
if oldFile:
date = datetime.utcfromtimestamp(oldFile).strftime("%Y-%m-%d")
problems.append(["WARN ", modTail, f"File was modified {date} which was before mods were enabled for Repentance!"])
print("\nFinished diagnosing!\n")
os.system('color') # Prepare for color printing
for entry in problems:
problemLevel = entry[0]
modName = entry[1]
problemText = entry[2]
text = f"{problemLevel}: {modName} | {problemText}"
if problemLevel == "WARN ":
color = bcolors.WARNING
elif problemLevel == "ERROR":
color = bcolors.FAIL
colorPrint(text, color)
print("\nPress enter to exit.")
input()
if __name__ == "__main__":
# Start the sequence
main()
| 30.02924
| 140
| 0.615385
|
d5eb441d2eaaeccd5e928fa5055d65885b949820
| 47,887
|
py
|
Python
|
pygemfxns_output.py
|
Wang518hongyu/PyGEMwangtest
|
f6ff507681b45599d0ecce5be2e5292e94fd09f7
|
[
"MIT"
] | null | null | null |
pygemfxns_output.py
|
Wang518hongyu/PyGEMwangtest
|
f6ff507681b45599d0ecce5be2e5292e94fd09f7
|
[
"MIT"
] | null | null | null |
pygemfxns_output.py
|
Wang518hongyu/PyGEMwangtest
|
f6ff507681b45599d0ecce5be2e5292e94fd09f7
|
[
"MIT"
] | null | null | null |
""" Functions that pertain to creating and writing output for the model results."""
# External Libraries
import numpy as np
import netCDF4 as nc
from time import strftime
import matplotlib.pyplot as plt
# Local Libraries
import pygem_input as input
def netcdfcreate(filename, main_glac_rgi, main_glac_hyps, dates_table, output_filepath=input.output_filepath, nsims=1):
"""
Create a netcdf file to store the desired output
Parameters
----------
filename : str
netcdf filename that is being created
main_glac_rgi : pandas dataframe
dataframe containing relevant rgi glacier information
main_glac_hyps : numpy array
glacier hypsometry of every glacier included in model run
dates_table : pandas dataframe
table of the dates, months, days in month, etc.
output_filepath : str
output filepath of where to store netcdf file
nsims : int
number of simulations included
Returns
-------
    creates a netcdf file with the proper structure to be filled in by the model results
"""
# Annual columns
annual_columns = np.unique(dates_table['wateryear'].values)[0:int(dates_table.shape[0]/12)]
# Netcdf file path and name
fullfilename = output_filepath + filename
# Create netcdf file ('w' will overwrite existing files, 'r+' will open existing file to write)
netcdf_output = nc.Dataset(fullfilename, 'w', format='NETCDF4')
# ===== Global attributes =====
netcdf_output.description = 'Results from glacier evolution model'
netcdf_output.history = 'Created ' + str(strftime("%Y-%m-%d %H:%M:%S"))
netcdf_output.source = 'Python Glacier Evolution Model'
# ===== Dimensions =====
glac_idx = netcdf_output.createDimension('glac_idx', None)
if input.timestep == 'monthly':
time = netcdf_output.createDimension('time', dates_table.shape[0] - input.spinupyears * 12)
year = netcdf_output.createDimension('year', annual_columns.shape[0] - input.spinupyears)
year_plus1 = netcdf_output.createDimension('year_plus1', annual_columns.shape[0] - input.spinupyears + 1)
glac_table = netcdf_output.createDimension('glac_table', main_glac_rgi.shape[1])
elevbin = netcdf_output.createDimension('elevbin', main_glac_hyps.shape[1])
sim = netcdf_output.createDimension('sim', nsims)
# Variables associated with dimensions
sims = netcdf_output.createVariable('sim', np.int32, ('sim',))
sims.long_name = 'simulation number'
sims[:] = range(0, nsims)
glaciers = netcdf_output.createVariable('glac_idx', np.int32, ('glac_idx',))
glaciers.long_name = "glacier index"
glaciers.standard_name = input.indexname
glaciers.comment = "Glacier index value that refers to the glacier table"
glaciers[:] = main_glac_rgi.index.values
times = netcdf_output.createVariable('time', np.float64, ('time',))
times.long_name = "date"
times.units = "days since 1900-01-01 00:00:00"
times.calendar = "gregorian"
if input.timestep == 'monthly':
times[:] = (nc.date2num(dates_table.loc[input.spinupyears*12:dates_table.shape[0]+1,'date'].tolist(),
units = times.units, calendar = times.calendar))
years = netcdf_output.createVariable('year', np.int32, ('year',))
years.long_name = "year"
if input.option_wateryear == 1:
years.units = 'water year'
elif input.option_wateryear == 2:
years.units = 'calendar year'
elif input.option_wateryear == 3:
years.units = 'custom year'
years[:] = annual_columns[input.spinupyears:annual_columns.shape[0]]
# years_plus1 adds an additional year such that the change in glacier dimensions (area, etc.) is recorded
years_plus1 = netcdf_output.createVariable('year_plus1', np.int32, ('year_plus1',))
years_plus1.long_name = "year with additional year to record glacier dimension changes"
if input.option_wateryear == 1:
years_plus1.units = 'water year'
elif input.option_wateryear == 2:
years_plus1.units = 'calendar year'
elif input.option_wateryear == 3:
years_plus1.units = 'custom year'
years_plus1[:] = np.concatenate((annual_columns[input.spinupyears:annual_columns.shape[0]],
np.array([annual_columns[annual_columns.shape[0]-1]+1])))
glacier_table_header = netcdf_output.createVariable('glacier_table_header',str,('glac_table',))
glacier_table_header.long_name = "glacier table header"
glacier_table_header[:] = main_glac_rgi.columns.values
glacier_table_header.comment = "Column names of RGI table and any added columns. See 'glac_table' for values."
glacier_table = netcdf_output.createVariable('glacier_table',np.float64,('glac_idx','glac_table',))
glacier_table.long_name = "glacier table values"
glacier_table[:] = main_glac_rgi.values
glacier_table.comment = "Values of RGI table and any added columns. See 'glac_table_header' for column names"
elevbins = netcdf_output.createVariable('elevbin', np.int32, ('elevbin',))
elevbins.long_name = "center of elevation bin"
elevbins.units = "m a.s.l."
elevbins[:] = main_glac_hyps.columns.values
# ===== Output Variables =====
if input.output_package == 1:
# Package 1 "Raw Package" output [units: m w.e. unless otherwise specified]:
# monthly variables for each bin (temp, prec, acc, refreeze, snowpack, melt, frontalablation, massbal_clim)
# annual variables for each bin (area, icethickness, width, surfacetype)
temp_bin_monthly = netcdf_output.createVariable('temp_bin_monthly', np.float64, ('glac_idx', 'elevbin', 'time'))
temp_bin_monthly.long_name = "air temperature"
temp_bin_monthly.units = "degC"
prec_bin_monthly = netcdf_output.createVariable('prec_bin_monthly', np.float64, ('glac_idx', 'elevbin', 'time'))
prec_bin_monthly.long_name = "liquid precipitation"
prec_bin_monthly.units = "m"
acc_bin_monthly = netcdf_output.createVariable('acc_bin_monthly', np.float64, ('glac_idx', 'elevbin', 'time'))
acc_bin_monthly.long_name = "accumulation"
acc_bin_monthly.units = "m w.e."
refreeze_bin_monthly = netcdf_output.createVariable('refreeze_bin_monthly', np.float64,
('glac_idx', 'elevbin', 'time'))
refreeze_bin_monthly.long_name = "refreezing"
refreeze_bin_monthly.units = "m w.e."
snowpack_bin_monthly = netcdf_output.createVariable('snowpack_bin_monthly', np.float64,
('glac_idx', 'elevbin', 'time'))
snowpack_bin_monthly.long_name = "snowpack on the glacier surface"
snowpack_bin_monthly.units = "m w.e."
snowpack_bin_monthly.comment = ("snowpack represents the snow depth when units are m w.e.")
melt_bin_monthly = netcdf_output.createVariable('melt_bin_monthly', np.float64, ('glac_idx', 'elevbin', 'time'))
melt_bin_monthly.long_name = 'surface melt'
melt_bin_monthly.units = "m w.e."
melt_bin_monthly.comment = ("surface melt is the sum of melt from snow, refreeze, and the underlying glacier")
frontalablation_bin_monthly = netcdf_output.createVariable('frontalablation_bin_monthly', np.float64,
('glac_idx', 'elevbin', 'time'))
frontalablation_bin_monthly.long_name = "frontal ablation"
frontalablation_bin_monthly.units = "m w.e."
frontalablation_bin_monthly.comment = ("mass losses from calving, subaerial frontal melting, sublimation above "
+ "the waterline and subaqueous frontal melting below the waterline")
massbalclim_bin_monthly = netcdf_output.createVariable('massbalclim_bin_monthly', np.float64,
('glac_idx', 'elevbin', 'time'))
massbalclim_bin_monthly.long_name = "climatic mass balance"
massbalclim_bin_monthly.units = "m w.e."
massbalclim_bin_monthly.comment = ("climatic mass balance is the sum of the surface mass balance and the "
+ "internal mass balance and accounts for the climatic mass loss over the "
+ "area of the entire bin")
area_bin_annual = netcdf_output.createVariable('area_bin_annual', np.float64,
('glac_idx', 'elevbin', 'year_plus1'))
area_bin_annual.long_name = "glacier area"
        area_bin_annual.units = "km**2"
area_bin_annual.comment = "the area that was used for the duration of the year"
icethickness_bin_annual = netcdf_output.createVariable('icethickness_bin_annual', np.float64,
('glac_idx', 'elevbin', 'year_plus1'))
icethickness_bin_annual.long_name = "ice thickness"
        icethickness_bin_annual.units = "m ice"
icethickness_bin_annual.comment = "the ice thickness that was used for the duration of the year"
width_bin_annual = netcdf_output.createVariable('width_bin_annual', np.float64,
('glac_idx', 'elevbin', 'year_plus1'))
width_bin_annual.long_name = "glacier width"
        width_bin_annual.units = "km"
width_bin_annual.comment = "the width that was used for the duration of the year"
surfacetype_bin_annual = netcdf_output.createVariable('surfacetype_bin_annual', np.float64,
('glac_idx', 'elevbin', 'year'))
surfacetype_bin_annual.long_name = "surface type"
surfacetype_bin_annual.comment = "surface types: 0 = off-glacier, 1 = ice, 2 = snow, 3 = firn, 4 = debris"
elif input.output_package == 2:
# Package 2 "Glaciologist Package" output [units: m w.e. unless otherwise specified]:
# monthly glacier-wide variables (prec, acc, refreeze, melt, frontalablation, massbal_total, runoff, snowline)
# annual glacier-wide variables (area, volume, ELA)
temp_glac_monthly = netcdf_output.createVariable('temp_glac_monthly', np.float64, ('glac_idx', 'time', 'sim'))
temp_glac_monthly.long_name = "glacier-wide mean air temperature"
temp_glac_monthly.units = "deg C"
temp_glac_monthly.comment = ("each elevation bin is weighted equally to compute the mean temperature, and bins "
+ "where the glacier no longer exists due to retreat have been removed")
prec_glac_monthly = netcdf_output.createVariable('prec_glac_monthly', np.float64, ('glac_idx', 'time', 'sim'))
prec_glac_monthly.long_name = "glacier-wide precipitation (liquid)"
prec_glac_monthly.units = "m"
acc_glac_monthly = netcdf_output.createVariable('acc_glac_monthly', np.float64, ('glac_idx', 'time', 'sim'))
acc_glac_monthly.long_name = "glacier-wide accumulation"
acc_glac_monthly.units = "m w.e."
refreeze_glac_monthly = netcdf_output.createVariable('refreeze_glac_monthly', np.float64,
('glac_idx', 'time', 'sim'))
refreeze_glac_monthly.long_name = "glacier-wide refreeze"
refreeze_glac_monthly.units = "m w.e."
melt_glac_monthly = netcdf_output.createVariable('melt_glac_monthly', np.float64, ('glac_idx', 'time', 'sim'))
melt_glac_monthly.long_name = "glacier-wide melt"
melt_glac_monthly.units = "m w.e."
frontalablation_glac_monthly = netcdf_output.createVariable('frontalablation_glac_monthly', np.float64,
('glac_idx', 'time', 'sim'))
frontalablation_glac_monthly.long_name = "glacier-wide frontal ablation"
frontalablation_glac_monthly.units = "m w.e."
frontalablation_glac_monthly.comment = ("mass losses from calving, subaerial frontal melting, sublimation above"
+ " the waterline and subaqueous frontal melting below the waterline")
massbaltotal_glac_monthly = netcdf_output.createVariable('massbaltotal_glac_monthly', np.float64,
('glac_idx', 'time', 'sim'))
massbaltotal_glac_monthly.long_name = "glacier-wide total mass balance"
massbaltotal_glac_monthly.units = "m w.e."
massbaltotal_glac_monthly.comment = ("total mass balance is the sum of the climatic mass balance and frontal "
+ "ablation.")
runoff_glac_monthly = netcdf_output.createVariable('runoff_glac_monthly', np.float64,
('glac_idx', 'time', 'sim'))
runoff_glac_monthly.long_name = "glacier runoff"
runoff_glac_monthly.units = "m**3"
runoff_glac_monthly.comment = "runoff from the glacier terminus, which moves over time"
snowline_glac_monthly = netcdf_output.createVariable('snowline_glac_monthly', np.float64,
('glac_idx', 'time', 'sim'))
snowline_glac_monthly.long_name = "transient snowline"
snowline_glac_monthly.units = "m a.s.l."
snowline_glac_monthly.comment = "transient snowline is the line separating the snow from ice/firn"
area_glac_annual = netcdf_output.createVariable('area_glac_annual', np.float64,
('glac_idx', 'year_plus1', 'sim'))
if input.option_wateryear == 1:
area_glac_annual.long_name = "glacier area by hydrological year"
elif input.option_wateryear == 2:
area_glac_annual.long_name = "glacier area by calendar year"
elif input.option_wateryear == 3:
area_glac_annual.long_name = "glacier area by custom year"
else:
area_glac_annual.long_name = "glacier area"
area_glac_annual.units = "km**2"
area_glac_annual.comment = "the area that was used for the duration of the defined start/end of year"
volume_glac_annual = netcdf_output.createVariable('volume_glac_annual', np.float64,
('glac_idx', 'year_plus1', 'sim'))
if input.option_wateryear == 1:
volume_glac_annual.long_name = "glacier volume by hydrological year"
elif input.option_wateryear == 2:
volume_glac_annual.long_name = "glacier volume by calendar year"
elif input.option_wateryear == 3:
volume_glac_annual.long_name = "glacier volume by custom year"
else:
volume_glac_annual.long_name = "glacier volume"
volume_glac_annual.units = "km**3 ice"
volume_glac_annual.comment = "the volume based on area and ice thickness used for that year"
ELA_glac_annual = netcdf_output.createVariable('ELA_glac_annual', np.float64, ('glac_idx', 'year', 'sim'))
ELA_glac_annual.long_name = "annual equilibrium line altitude"
ELA_glac_annual.units = "m a.s.l."
ELA_glac_annual.comment = "equilibrium line altitude is the elevation where the climatic mass balance is zero"
netcdf_output.close()
def netcdfwrite(netcdf_fn, glac, modelparameters, glacier_rgi_table, elev_bins, glac_bin_temp, glac_bin_prec,
glac_bin_acc, glac_bin_refreeze, glac_bin_snowpack, glac_bin_melt, glac_bin_frontalablation,
glac_bin_massbalclim, glac_bin_massbalclim_annual, glac_bin_area_annual, glac_bin_icethickness_annual,
glac_bin_width_annual, glac_bin_surfacetype_annual, output_filepath=input.output_filepath, sim=0):
"""
Write to the netcdf file that has already been generated to store the desired output
Parameters
----------
netcdf_fn : str
netcdf filename that is being filled in
    glac : int
        glacier index number used to determine where to write model results
    modelparameters : array-like
        model parameters [lrglac, lrgcm, precfactor, precgrad, ddfsnow, ddfice, tempsnow, tempchange]
glacier_rgi_table : pandas series
series containing relevant rgi glacier information
elev_bins : numpy array
elevation bins
glac_bin_temp : numpy array
temperature for each elevation bin for each timestep
glac_bin_prec : numpy array
precipitation (liquid) for each elevation bin for each timestep
glac_bin_acc : numpy array
accumulation (solid precipitation) for each elevation bin for each timestep
glac_bin_refreeze : numpy array
refreeze for each elevation bin for each timestep
glac_bin_snowpack : numpy array
snowpack for each elevation bin for each timestep
glac_bin_melt : numpy array
glacier melt for each elevation bin for each timestep
glac_bin_frontalablation : numpy array
frontal ablation for each elevation bin for each timestep
glac_bin_massbalclim : numpy array
climatic mass balance for each elevation bin for each timestep
glac_bin_massbalclim_annual : numpy array
annual climatic mass balance for each elevation bin for each timestep
glac_bin_area_annual : numpy array
annual glacier area for each elevation bin for each timestep
glac_bin_icethickness_annual: numpy array
annual ice thickness for each elevation bin for each timestep
glac_bin_width_annual : numpy array
annual glacier width for each elevation bin for each timestep
glac_bin_surfacetype_annual : numpy array
annual surface type for each elevation bin for each timestep
output_filepath : str
output filepath of where to store netcdf file
sim : int
simulation index used to write model results
Returns
-------
netcdf file with model results filled in
"""
# Open netcdf file to write to existing file ('r+')
netcdf_output = nc.Dataset(output_filepath + netcdf_fn, 'r+')
# Record the variables for each glacier (remove data associated with spinup years)
if input.output_package == 1:
# Package 1 "Raw Package" output [units: m w.e. unless otherwise specified]:
# monthly variables for each bin (temp, prec, acc, refreeze, snowpack, melt, frontalablation, massbal_clim)
# annual variables for each bin (area, icethickness, surfacetype)
# Write variables to netcdf
netcdf_output.variables['temp_bin_monthly'][glac,:,:] = glac_bin_temp
netcdf_output.variables['prec_bin_monthly'][glac,:,:] = glac_bin_prec
netcdf_output.variables['acc_bin_monthly'][glac,:,:] = glac_bin_acc
netcdf_output.variables['refreeze_bin_monthly'][glac,:,:] = glac_bin_refreeze
netcdf_output.variables['snowpack_bin_monthly'][glac,:,:] = glac_bin_snowpack
netcdf_output.variables['melt_bin_monthly'][glac,:,:] = glac_bin_melt
netcdf_output.variables['frontalablation_bin_monthly'][glac,:,:] = glac_bin_frontalablation
netcdf_output.variables['massbalclim_bin_monthly'][glac,:,:] = glac_bin_massbalclim
netcdf_output.variables['area_bin_annual'][glac,:,:] = glac_bin_area_annual
netcdf_output.variables['icethickness_bin_annual'][glac,:,:] = glac_bin_icethickness_annual
netcdf_output.variables['width_bin_annual'][glac,:,:] = glac_bin_width_annual
netcdf_output.variables['surfacetype_bin_annual'][glac,:,:] = glac_bin_surfacetype_annual
elif input.output_package == 2:
# Package 2 "Glaciologist Package" output [units: m w.e. unless otherwise specified]:
# monthly glacier-wide variables (prec, acc, refreeze, melt, frontalablation, massbal_total, runoff, snowline)
# annual glacier-wide variables (area, volume, ELA)
# Preset desired output (needed to avoid dividing by zero)
glac_wide_temp = np.zeros(glac_bin_temp.shape[1])
glac_wide_prec = np.zeros(glac_bin_temp.shape[1])
glac_wide_acc = np.zeros(glac_bin_temp.shape[1])
glac_wide_refreeze = np.zeros(glac_bin_temp.shape[1])
glac_wide_melt = np.zeros(glac_bin_temp.shape[1])
glac_wide_frontalablation = np.zeros(glac_bin_temp.shape[1])
# Compute desired output
glac_bin_area = glac_bin_area_annual[:,0:glac_bin_area_annual.shape[1]-1].repeat(12,axis=1)
glac_wide_area = glac_bin_area.sum(axis=0)
glac_wide_temp_sum = glac_bin_temp.sum(axis=0)
glac_bin_temp_nonzero = np.zeros(glac_bin_temp.shape)
glac_bin_temp_nonzero[glac_bin_temp != 0] = 1
glac_wide_temp_bincount = glac_bin_temp_nonzero.sum(axis=0)
glac_wide_temp[glac_wide_temp_bincount > 0] = (glac_wide_temp_sum[glac_wide_temp_bincount > 0] /
glac_wide_temp_bincount[glac_wide_temp_bincount > 0])
glac_wide_prec_mkm2 = (glac_bin_prec * glac_bin_area).sum(axis=0)
glac_wide_prec[glac_wide_prec_mkm2 > 0] = (glac_wide_prec_mkm2[glac_wide_prec_mkm2 > 0] /
glac_wide_area[glac_wide_prec_mkm2 > 0])
glac_wide_acc_mkm2 = (glac_bin_acc * glac_bin_area).sum(axis=0)
glac_wide_acc[glac_wide_acc_mkm2 > 0] = (glac_wide_acc_mkm2[glac_wide_acc_mkm2 > 0] /
glac_wide_area[glac_wide_acc_mkm2 > 0])
glac_wide_refreeze_mkm2 = (glac_bin_refreeze * glac_bin_area).sum(axis=0)
glac_wide_refreeze[glac_wide_refreeze_mkm2 > 0] = (glac_wide_refreeze_mkm2[glac_wide_refreeze_mkm2 > 0] /
glac_wide_area[glac_wide_refreeze_mkm2 > 0])
glac_wide_melt_mkm2 = (glac_bin_melt * glac_bin_area).sum(axis=0)
glac_wide_melt[glac_wide_melt_mkm2 > 0] = (glac_wide_melt_mkm2[glac_wide_melt_mkm2 > 0] /
glac_wide_area[glac_wide_melt_mkm2 > 0])
glac_wide_frontalablation_mkm2 = (glac_bin_frontalablation * glac_bin_area).sum(axis=0)
glac_wide_frontalablation[glac_wide_frontalablation_mkm2 > 0] = (
glac_wide_frontalablation_mkm2[glac_wide_frontalablation_mkm2 > 0] /
glac_wide_area[glac_wide_frontalablation_mkm2 > 0])
glac_wide_massbalclim = glac_wide_acc + glac_wide_refreeze - glac_wide_melt
glac_wide_massbaltotal = glac_wide_massbalclim - glac_wide_frontalablation
glac_wide_runoff = (glac_wide_prec + glac_wide_melt - glac_wide_refreeze) * glac_wide_area * (1000)**2
# units: (m + m w.e. - m w.e.) * km**2 * (1000 m / 1 km)**2 = m**3
glac_wide_snowline = (glac_bin_snowpack > 0).argmax(axis=0)
glac_wide_snowline[glac_wide_snowline > 0] = (elev_bins[glac_wide_snowline[glac_wide_snowline > 0]] -
input.binsize/2)
glac_wide_area_annual = glac_bin_area_annual.sum(axis=0)
glac_wide_volume_annual = (glac_bin_area_annual * glac_bin_icethickness_annual / 1000).sum(axis=0)
glac_wide_ELA_annual = (glac_bin_massbalclim_annual > 0).argmax(axis=0)
glac_wide_ELA_annual[glac_wide_ELA_annual > 0] = (elev_bins[glac_wide_ELA_annual[glac_wide_ELA_annual > 0]] -
input.binsize/2)
# Write variables to netcdf
netcdf_output.variables['temp_glac_monthly'][glac,:,sim] = glac_wide_temp
netcdf_output.variables['prec_glac_monthly'][glac,:,sim] = glac_wide_prec
netcdf_output.variables['acc_glac_monthly'][glac,:,sim] = glac_wide_acc
netcdf_output.variables['refreeze_glac_monthly'][glac,:,sim] = glac_wide_refreeze
netcdf_output.variables['melt_glac_monthly'][glac,:,sim] = glac_wide_melt
netcdf_output.variables['frontalablation_glac_monthly'][glac,:,sim] = glac_wide_frontalablation
netcdf_output.variables['massbaltotal_glac_monthly'][glac,:,sim] = glac_wide_massbaltotal
netcdf_output.variables['runoff_glac_monthly'][glac,:,sim] = glac_wide_runoff
netcdf_output.variables['snowline_glac_monthly'][glac,:,sim] = glac_wide_snowline
netcdf_output.variables['area_glac_annual'][glac,:,sim] = glac_wide_area_annual
netcdf_output.variables['volume_glac_annual'][glac,:,sim] = glac_wide_volume_annual
netcdf_output.variables['ELA_glac_annual'][glac,:,sim] = glac_wide_ELA_annual
# Close the netcdf file
netcdf_output.close()
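# Illustrative sketch (assumption, not part of the model run scripts): the intended
# call pattern, once the RGI table, hypsometry, and dates table have been assembled
# elsewhere in the model. All of the names below are placeholders.
#
#     netcdfcreate('output_R15.nc', main_glac_rgi, main_glac_hyps, dates_table, nsims=1)
#     for glac in range(main_glac_rgi.shape[0]):
#         # ... run the mass balance model for this glacier ...
#         netcdfwrite('output_R15.nc', glac, modelparameters, glacier_rgi_table,
#                     elev_bins, glac_bin_temp, glac_bin_prec, glac_bin_acc,
#                     glac_bin_refreeze, glac_bin_snowpack, glac_bin_melt,
#                     glac_bin_frontalablation, glac_bin_massbalclim,
#                     glac_bin_massbalclim_annual, glac_bin_area_annual,
#                     glac_bin_icethickness_annual, glac_bin_width_annual,
#                     glac_bin_surfacetype_annual, sim=0)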
#def netcdfcreate_calgridsearch(regionO1_number, main_glac_hyps, dates_table, modelparameters):
# # Annual columns
# annual_columns = np.unique(dates_table['wateryear'].values)
# # Netcdf file path and name
# filename = input.calibrationnetcdf_filenameprefix + str(regionO1_number) + '_' + str(strftime("%Y%m%d")) + '.nc'
# fullfilename = input.output_filepath + filename
# # Create netcdf file ('w' will overwrite existing files, 'r+' will open existing file to write)
# netcdf_output = nc.Dataset(fullfilename, 'w', format='NETCDF4')
# # Global attributes
# netcdf_output.description = 'Results from glacier evolution model'
# netcdf_output.history = 'Created ' + str(strftime("%Y-%m-%d %H:%M:%S"))
# netcdf_output.source = 'Python Glacier Evolution Model'
# # Dimensions
# glac_idx = netcdf_output.createDimension('glac_idx', None)
# elevbin = netcdf_output.createDimension('elevbin', main_glac_hyps.shape[1])
# if input.timestep == 'monthly':
# time = netcdf_output.createDimension('time', dates_table.shape[0] - input.spinupyears * 12)
# year = netcdf_output.createDimension('year', annual_columns.shape[0] - input.spinupyears)
# year_plus1 = netcdf_output.createDimension('year_plus1', annual_columns.shape[0] - input.spinupyears + 1)
# gridround = netcdf_output.createDimension('gridround', modelparameters.shape[0])
# gridparam = netcdf_output.createDimension('gridparam', modelparameters.shape[1])
# glacierinfo = netcdf_output.createDimension('glacierinfo', 3)
# # Variables associated with dimensions
# glaciers = netcdf_output.createVariable('glac_idx', np.int32, ('glac_idx',))
# glaciers.long_name = "glacier number associated with model run"
# glaciers.standard_name = "GlacNo"
# glaciers.comment = ("The glacier number is defined for each model run. The user should look at the main_glac_rgi"
# + " table to determine the RGIID or other information regarding this particular glacier.")
# elevbins = netcdf_output.createVariable('elevbin', np.int32, ('elevbin',))
# elevbins.standard_name = "center of elevation bin"
# elevbins.units = "m a.s.l."
# elevbins[:] = main_glac_hyps.columns.values
# times = netcdf_output.createVariable('time', np.float64, ('time',))
# times.standard_name = "date"
# times.units = "days since 1900-01-01 00:00:00"
# times.calendar = "gregorian"
# if input.timestep == 'monthly':
# times[:] = (nc.date2num(dates_table.loc[input.spinupyears*12:dates_table.shape[0]+1,'date'].astype(datetime),
# units = times.units, calendar = times.calendar))
# years = netcdf_output.createVariable('year', np.int32, ('year',))
# years.standard_name = "year"
# if input.option_wateryear == 1:
# years.units = 'water year'
# elif input.option_wateryear == 0:
# years.units = 'calendar year'
# years[:] = annual_columns[input.spinupyears:annual_columns.shape[0]]
# # years_plus1 adds an additional year such that the change in glacier dimensions (area, etc.) is recorded
# years_plus1 = netcdf_output.createVariable('year_plus1', np.int32, ('year_plus1',))
# years_plus1.standard_name = "year with additional year to record glacier dimension changes"
# if input.option_wateryear == 1:
# years_plus1.units = 'water year'
# elif input.option_wateryear == 0:
# years_plus1.units = 'calendar year'
# years_plus1[:] = np.concatenate((annual_columns[input.spinupyears:annual_columns.shape[0]],
# np.array([annual_columns[annual_columns.shape[0]-1]+1])))
# gridrounds = netcdf_output.createVariable('gridround', np.int32, ('gridround',))
# gridrounds.long_name = "number associated with the calibration grid search"
# glacierinfoheader = netcdf_output.createVariable('glacierinfoheader', str, ('glacierinfo',))
# glacierinfoheader.standard_name = "information about each glacier from main_glac_rgi"
# glacierinfoheader[:] = np.array(['RGIID','lat','lon'])
# glacierinfo = netcdf_output.createVariable('glacierinfo',str,('glac_idx','glacierinfo',))
# # Variables associated with the output
# # monthly glacier-wide variables (massbal_total, runoff, snowline, snowpack)
# # annual glacier-wide variables (area, volume, ELA)
# grid_modelparameters = netcdf_output.createVariable('grid_modelparameters', np.float64, ('gridround', 'gridparam'))
# grid_modelparameters.standard_name = ("grid model parameters [lrglac, lrgcm, precfactor, precgrad, ddfsnow, ddfice,"
# + " tempsnow, tempchange]")
# grid_modelparameters[:] = modelparameters
# massbaltotal_glac_monthly = netcdf_output.createVariable('massbaltotal_glac_monthly', np.float64,
# ('glac_idx', 'gridround', 'time'))
# massbaltotal_glac_monthly.standard_name = "glacier-wide total mass balance"
# massbaltotal_glac_monthly.units = "m w.e."
# massbaltotal_glac_monthly.comment = ("total mass balance is the sum of the climatic mass balance and frontal "
# + "ablation.")
# runoff_glac_monthly = netcdf_output.createVariable('runoff_glac_monthly', np.float64,
# ('glac_idx', 'gridround', 'time'))
# runoff_glac_monthly.standard_name = "glacier runoff"
# runoff_glac_monthly.units = "m**3"
# runoff_glac_monthly.comment = "runoff from the glacier terminus, which moves over time"
# snowline_glac_monthly = netcdf_output.createVariable('snowline_glac_monthly', np.float64,
# ('glac_idx', 'gridround', 'time'))
# snowline_glac_monthly.standard_name = "transient snowline"
# snowline_glac_monthly.units = "m a.s.l."
# snowline_glac_monthly.comment = "transient snowline is the line separating the snow from ice/firn"
# snowpack_glac_monthly = netcdf_output.createVariable('snowpack_glac_monthly', np.float64,
# ('glac_idx', 'gridround', 'time'))
# snowpack_glac_monthly.standard_name = "snowpack volume"
# snowpack_glac_monthly.units = "km**3 w.e."
# snowpack_glac_monthly.comment = "m w.e. multiplied by the area converted to km**3"
# area_glac_annual = netcdf_output.createVariable('area_glac_annual', np.float64,
# ('glac_idx', 'gridround', 'year_plus1'))
# area_glac_annual.standard_name = "glacier area"
# area_glac_annual.units = "km**2"
# area_glac_annual.comment = "the area that was used for the duration of the year"
# volume_glac_annual = netcdf_output.createVariable('volume_glac_annual', np.float64,
# ('glac_idx', 'gridround', 'year_plus1'))
# volume_glac_annual.standard_name = "glacier volume"
# volume_glac_annual.units = "km**3 ice"
# volume_glac_annual.comment = "the volume based on area and ice thickness used for that year"
# ELA_glac_annual = netcdf_output.createVariable('ELA_glac_annual', np.float64, ('glac_idx', 'gridround', 'year'))
# ELA_glac_annual.standard_name = "annual equilibrium line altitude"
# ELA_glac_annual.units = "m a.s.l."
# ELA_glac_annual.comment = "equilibrium line altitude is the elevation where the climatic mass balance is zero"
# netcdf_output.close()
# return fullfilename
#
#
#def netcdfwrite_calgridsearch(fullfilename, glac, glacier_rgi_table, output_glac_wide_massbaltotal,
# output_glac_wide_runoff, output_glac_wide_snowline, output_glac_wide_snowpack,
# output_glac_wide_area_annual, output_glac_wide_volume_annual,
# output_glac_wide_ELA_annual):
# # Open netcdf file to write to existing file ('r+')
# netcdf_output = nc.Dataset(fullfilename, 'r+')
# # Write variables to netcdf
# netcdf_output.variables['glacierinfo'][glac,:] = np.array([glacier_rgi_table.loc['RGIId'],
# glacier_rgi_table.loc[input.lat_colname], glacier_rgi_table.loc[input.lon_colname]])
# netcdf_output.variables['massbaltotal_glac_monthly'][glac,:,:] = output_glac_wide_massbaltotal
# netcdf_output.variables['runoff_glac_monthly'][glac,:,:] = output_glac_wide_runoff
# netcdf_output.variables['snowline_glac_monthly'][glac,:,:] = output_glac_wide_snowline
# netcdf_output.variables['snowpack_glac_monthly'][glac,:,:] = output_glac_wide_snowpack
# netcdf_output.variables['area_glac_annual'][glac,:,:] = output_glac_wide_area_annual
# netcdf_output.variables['volume_glac_annual'][glac,:,:] = output_glac_wide_volume_annual
# netcdf_output.variables['ELA_glac_annual'][glac,:,:] = output_glac_wide_ELA_annual
# netcdf_output.close()
#%%===== PLOT FUNCTIONS =============================================================================================
def plot_latlonvar(lons, lats, variable, rangelow, rangehigh, title, xlabel, ylabel, colormap, east, west, south, north,
xtick, ytick):
"""
Plot a variable according to its latitude and longitude
"""
# Create the projection
ax = plt.axes(projection=cartopy.crs.PlateCarree())
# Add country borders for reference
ax.add_feature(cartopy.feature.BORDERS)
# Set the extent
ax.set_extent([east, west, south, north], cartopy.crs.PlateCarree())
# Label title, x, and y axes
plt.title(title)
ax.set_xticks(np.arange(east,west+1,xtick), cartopy.crs.PlateCarree())
ax.set_yticks(np.arange(south,north+1,ytick), cartopy.crs.PlateCarree())
plt.xlabel(xlabel)
plt.ylabel(ylabel)
# Plot the data
plt.scatter(lons, lats, c=variable, cmap=colormap)
# plotting x, y, size [s=__], color bar [c=__]
plt.clim(rangelow,rangehigh)
# set the range of the color bar
plt.colorbar(fraction=0.02, pad=0.04)
# fraction resizes the colorbar, pad is the space between the plot and colorbar
plt.show()
def plot_caloutput(data):
"""
Plot maps and histograms of the calibration parameters to visualize results
"""
# Set extent
east = int(round(data['CenLon'].min())) - 1
west = int(round(data['CenLon'].max())) + 1
south = int(round(data['CenLat'].min())) - 1
north = int(round(data['CenLat'].max())) + 1
xtick = 1
ytick = 1
# Select relevant data
lats = data['CenLat'][:]
lons = data['CenLon'][:]
precfactor = data['precfactor'][:]
tempchange = data['tempchange'][:]
ddfsnow = data['ddfsnow'][:]
calround = data['calround'][:]
massbal = data['MB_geodetic_mwea']
    # Plot regional maps (color ranges and colormaps follow the values used in the plotting section below)
    plot_latlonvar(lons, lats, massbal, -1.5, 0.5, 'Geodetic mass balance [mwea]', 'longitude [deg]',
                   'latitude [deg]', 'jet_r', east, west, south, north, xtick, ytick)
    plot_latlonvar(lons, lats, precfactor, 0.8, 1.3, 'Precipitation factor [-]', 'longitude [deg]',
                   'latitude [deg]', 'jet_r', east, west, south, north, xtick, ytick)
    plot_latlonvar(lons, lats, tempchange, -4, 2, 'Temperature bias [degC]', 'longitude [deg]',
                   'latitude [deg]', 'jet', east, west, south, north, xtick, ytick)
    plot_latlonvar(lons, lats, ddfsnow, 0.003, 0.005, 'DDF_snow [m w.e. d-1 degC-1]', 'longitude [deg]',
                   'latitude [deg]', 'jet', east, west, south, north, xtick, ytick)
    plot_latlonvar(lons, lats, calround, 1, 3, 'Calibration round', 'longitude [deg]',
                   'latitude [deg]', 'jet_r', east, west, south, north, xtick, ytick)
# Plot histograms
data.hist(column='MB_difference_mwea', bins=50)
plt.title('Mass Balance Difference [mwea]')
data.hist(column='precfactor', bins=50)
plt.title('Precipitation factor [-]')
data.hist(column='tempchange', bins=50)
plt.title('Temperature bias [degC]')
data.hist(column='ddfsnow', bins=50)
plt.title('DDFsnow [mwe d-1 degC-1]')
plt.xticks(rotation=60)
data.hist(column='calround', bins = [0.5, 1.5, 2.5, 3.5])
plt.title('Calibration round')
plt.xticks([1, 2, 3])
#%%
if __name__ == '__main__':
gcm_list_fn = input.main_directory + '/../Climate_data/cmip5/gcm_rcp26_filenames.txt'
rcp_scenario = 'rcp26'
output_filepath = input.main_directory + '/../Output/'
output_prefix = 'PyGEM_R15_'
with open(gcm_list_fn, 'r') as gcm_fn:
gcm_list = gcm_fn.read().splitlines()
gcm_reg_annual_volume = np.zeros((len(gcm_list),101))
# for n_gcm in range(len(gcm_list)):
## for n_gcm in [0]:
# gcm = gcm_list[n_gcm]
# print(n_gcm, gcm)
# gcm_fn = glob.glob(output_filepath + output_prefix + gcm + '_' + rcp_scenario + '_2000_2100' + '*.nc')[0]
# output = nc.Dataset(gcm_fn)
# glac_annual_volume = output['volume_glac_annual'][:][:,:-1]
# reg_annual_volume = glac_annual_volume.sum(axis=0)
# annual_columns = output['year'][:]
# gcm_reg_annual_volume[n_gcm,:] = reg_annual_volume
# print(reg_annual_volume[100])
# output.close()
gcm_fn = output_filepath + 'PyGEM_R15_MPI-ESM-LR_rcp26_2000_2100_20180428.nc'
# gcm_fn = output_filepath + 'PyGEM_R15_NorESM1-ME_rcp26_2000_2100_20180428.nc'
output = nc.Dataset(gcm_fn)
glac_annual_volume = output['volume_glac_annual'][:][:,:-1]
reg_annual_volume = glac_annual_volume.sum(axis=0)
annual_columns = output['year'][:]
# Label title, x, and y axes
# plt.title(title)
# plt.xlabel(xlabel)
# plt.ylabel(ylabel)
# Plot the data
plt.scatter(annual_columns, reg_annual_volume)
# plotting x, y, size [s=__], color bar [c=__]
# plt.clim(rangelow,rangehigh)
# set the range of the color bar
# plt.colorbar(fraction=0.02, pad=0.04)
# fraction resizes the colorbar, pad is the space between the plot and colorbar
# plt.show()
#%%===== PLOTTING ===========================================================================================
#netcdf_output15 = nc.Dataset(input.main_directory +
# '/../Output/PyGEM_output_rgiregion15_ERAInterim_calSheanMB_nearest_20180306.nc', 'r+')
#netcdf_output15 = nc.Dataset(input.main_directory +
# '/../Output/PyGEM_output_rgiregion15_ERAInterim_calSheanMB_transferAvg_20180306.nc', 'r+')
#netcdf_output14 = nc.Dataset(input.main_directory +
# '/../Output/PyGEM_output_rgiregion14_ERAInterim_calSheanMB_nearest_20180313.nc', 'r+')
#netcdf_output14 = nc.Dataset(input.main_directory +
# '/../Output/PyGEM_output_rgiregion14_ERAInterim_calSheanMB_transferAvg_20180313.nc', 'r+')
#
## Select relevant data
#glacier_data15 = pd.DataFrame(netcdf_output15['glacierparameter'][:])
#glacier_data15.columns = netcdf_output15['glacierparameters'][:]
#lats15 = glacier_data15['lat'].values.astype(float)
#lons15 = glacier_data15['lon'].values.astype(float)
#massbal_total15 = netcdf_output15['massbaltotal_glac_monthly'][:]
#massbal_total_mwea15 = massbal_total15.sum(axis=1)/(massbal_total15.shape[1]/12)
#volume_glac_annual15 = netcdf_output15['volume_glac_annual'][:]
#volume_reg_annual15 = volume_glac_annual15.sum(axis=0)
#volume_reg_annualnorm15 = volume_reg_annual15 / volume_reg_annual15[0]
#runoff_glac_monthly15 = netcdf_output15['runoff_glac_monthly'][:]
#runoff_reg_monthly15 = runoff_glac_monthly15.mean(axis=0)
#acc_glac_monthly15 = netcdf_output15['acc_glac_monthly'][:]
#acc_reg_monthly15 = acc_glac_monthly15.mean(axis=0)
#acc_reg_annual15 = np.sum(acc_reg_monthly15.reshape(-1,12), axis=1)
#refreeze_glac_monthly15 = netcdf_output15['refreeze_glac_monthly'][:]
#refreeze_reg_monthly15 = refreeze_glac_monthly15.mean(axis=0)
#refreeze_reg_annual15 = np.sum(refreeze_reg_monthly15.reshape(-1,12), axis=1)
#melt_glac_monthly15 = netcdf_output15['melt_glac_monthly'][:]
#melt_reg_monthly15 = melt_glac_monthly15.mean(axis=0)
#melt_reg_annual15 = np.sum(melt_reg_monthly15.reshape(-1,12), axis=1)
#massbaltotal_glac_monthly15 = netcdf_output15['massbaltotal_glac_monthly'][:]
#massbaltotal_reg_monthly15 = massbaltotal_glac_monthly15.mean(axis=0)
#massbaltotal_reg_annual15 = np.sum(massbaltotal_reg_monthly15.reshape(-1,12), axis=1)
#glacier_data14 = pd.DataFrame(netcdf_output14['glacierparameter'][:])
#glacier_data14.columns = netcdf_output14['glacierparameters'][:]
#lats14 = glacier_data14['lat'].values.astype(float)
#lons14 = glacier_data14['lon'].values.astype(float)
#massbal_total14 = netcdf_output14['massbaltotal_glac_monthly'][:]
#massbal_total_mwea14 = massbal_total14.sum(axis=1)/(massbal_total14.shape[1]/12)
#volume_glac_annual14 = netcdf_output14['volume_glac_annual'][:]
#volume_reg_annual14 = volume_glac_annual14.sum(axis=0)
#volume_reg_annualnorm14 = volume_reg_annual14 / volume_reg_annual14[0]
#runoff_glac_monthly14 = netcdf_output14['runoff_glac_monthly'][:]
#runoff_reg_monthly14 = runoff_glac_monthly14.mean(axis=0)
#acc_glac_monthly14 = netcdf_output14['acc_glac_monthly'][:]
#acc_reg_monthly14 = acc_glac_monthly14.mean(axis=0)
#acc_reg_annual14 = np.sum(acc_reg_monthly14.reshape(-1,12), axis=1)
#refreeze_glac_monthly14 = netcdf_output14['refreeze_glac_monthly'][:]
#refreeze_reg_monthly14 = refreeze_glac_monthly14.mean(axis=0)
#refreeze_reg_annual14 = np.sum(refreeze_reg_monthly14.reshape(-1,12), axis=1)
#melt_glac_monthly14 = netcdf_output14['melt_glac_monthly'][:]
#melt_reg_monthly14 = melt_glac_monthly14.mean(axis=0)
#melt_reg_annual14 = np.sum(melt_reg_monthly14.reshape(-1,12), axis=1)
#massbaltotal_glac_monthly14 = netcdf_output14['massbaltotal_glac_monthly'][:]
#massbaltotal_reg_monthly14 = massbaltotal_glac_monthly14.mean(axis=0)
#massbaltotal_reg_annual14 = np.sum(massbaltotal_reg_monthly14.reshape(-1,12), axis=1)
#years = np.arange(2000, 2016 + 1)
#month = np.arange(2000, 2016, 1/12)
#plt.plot(years,volume_reg_annualnorm15, label='Region 15')
#plt.plot(years,volume_reg_annualnorm14, label='Region 14')
#plt.ylabel('Volume normalized [-]', size=15)
#plt.legend()
#plt.show()
#plt.plot(month,runoff_reg_monthly15, label='Region 15')
#plt.ylabel('Runoff [m3 / month]', size=15)
#plt.legend()
#plt.show()
##plt.plot(month, massbaltotal_reg_monthly, label='massbal_total')
##plt.plot(month, acc_reg_monthly, label='accumulation')
##plt.plot(month, refreeze_reg_monthly, label='refreeze')
##plt.plot(month, -1*melt_reg_monthly, label='melt')
##plt.ylabel('monthly regional mean [m.w.e.] / month')
##plt.legend()
##plt.show()
#plt.plot(years[0:16], massbaltotal_reg_annual15, label='massbal_total')
#plt.plot(years[0:16], acc_reg_annual15, label='accumulation')
#plt.plot(years[0:16], refreeze_reg_annual15, label='refreeze')
#plt.plot(years[0:16], -1*melt_reg_annual15, label='melt')
#plt.ylabel('Region 15 annual mean [m.w.e.]', size=15)
#plt.legend()
#plt.show()
#
#lons = np.concatenate((lons14, lons15), axis=0)
#lats = np.concatenate((lats14, lats15), axis=0)
#massbal_total_mwea = np.concatenate((massbal_total_mwea14, massbal_total_mwea15), axis=0)
#
## Set extent
#east = int(round(lons.min())) - 1
#west = int(round(lons.max())) + 1
#south = int(round(lats.min())) - 1
#north = int(round(lats.max())) + 1
#xtick = 1
#ytick = 1
## Plot regional maps
#plot_latlonvar(lons, lats, massbal_total_mwea, -1.5, 0.5, 'Modeled mass balance [mwea]', 'longitude [deg]',
# 'latitude [deg]', 'jet_r', east, west, south, north, xtick, ytick)
#%% ====== PLOTTING FOR CALIBRATION FUNCTION ======================================================================
### Plot histograms and regional variations
#data13 = pd.read_csv(input.main_directory + '/../Output/calibration_R13_20180318_Opt01solutionspaceexpanding.csv')
#data13 = data13.dropna()
##data14 = pd.read_csv(input.main_directory + '/../Output/calibration_R14_20180313_Opt01solutionspaceexpanding.csv')
##data14 = data14.dropna()
##data15 = pd.read_csv(input.main_directory + '/../Output/calibration_R15_20180306_Opt01solutionspaceexpanding.csv')
##data15 = data15.dropna()
#data = data13
#
## Concatenate the data
##frames = [data13, data14, data15]
##data = pd.concat(frames)
#
### Fill in values with average
### Subset all values that have data
##data_subset = data.dropna()
##data_subset_params = data_subset[['lrgcm','lrglac','precfactor','precgrad','ddfsnow','ddfice','tempsnow','tempchange']]
##data_subset_paramsavg = data_subset_params.mean()
##paramsfilled = data[['lrgcm','lrglac','precfactor','precgrad','ddfsnow','ddfice','tempsnow','tempchange']]
##paramsfilled = paramsfilled.fillna(data_subset_paramsavg)
#
## Set extent
#east = int(round(data['CenLon'].min())) - 1
#west = int(round(data['CenLon'].max())) + 1
#south = int(round(data['CenLat'].min())) - 1
#north = int(round(data['CenLat'].max())) + 1
#xtick = 1
#ytick = 1
## Select relevant data
#lats = data['CenLat'][:]
#lons = data['CenLon'][:]
#precfactor = data['precfactor'][:]
#tempchange = data['tempchange'][:]
#ddfsnow = data['ddfsnow'][:]
#calround = data['calround'][:]
#massbal = data['MB_geodetic_mwea']
## Plot regional maps
#plot_latlonvar(lons, lats, massbal, -1.5, 0.5, 'Geodetic mass balance [mwea]', 'longitude [deg]', 'latitude [deg]',
# 'jet_r', east, west, south, north, xtick, ytick)
#plot_latlonvar(lons, lats, precfactor, 0.8, 1.3, 'Precipitation factor [-]', 'longitude [deg]', 'latitude [deg]',
# 'jet_r', east, west, south, north, xtick, ytick)
#plot_latlonvar(lons, lats, tempchange, -4, 2, 'Temperature bias [degC]', 'longitude [deg]', 'latitude [deg]',
# 'jet', east, west, south, north, xtick, ytick)
#plot_latlonvar(lons, lats, ddfsnow, 0.003, 0.005, 'DDF_snow [m w.e. d-1 degC-1]', 'longitude [deg]', 'latitude [deg]',
# 'jet', east, west, south, north, xtick, ytick)
#plot_latlonvar(lons, lats, calround, 1, 3, 'Calibration round', 'longitude [deg]', 'latitude [deg]',
# 'jet_r', east, west, south, north, xtick, ytick)
## Plot histograms
#data.hist(column='MB_difference_mwea', bins=50)
#plt.title('Mass Balance Difference [mwea]')
#data.hist(column='precfactor', bins=50)
#plt.title('Precipitation factor [-]')
#data.hist(column='tempchange', bins=50)
#plt.title('Temperature bias [degC]')
#data.hist(column='ddfsnow', bins=50)
#plt.title('DDFsnow [mwe d-1 degC-1]')
#plt.xticks(rotation=60)
#data.hist(column='calround', bins = [0.5, 1.5, 2.5, 3.5])
#plt.title('Calibration round')
#plt.xticks([1, 2, 3])
#
### run plot function
##output.plot_caloutput(data)
| 61.551414
| 122
| 0.6654
|
b791bbc812e882200d65cfc37d7668bef69c2742
| 4,203
|
py
|
Python
|
online/migrations/0001_initial.py
|
neosergio/hackatrix-api
|
27f0180415efa97bd7345d100b314d8807486b67
|
[
"Apache-2.0"
] | 1
|
2021-02-12T10:25:28.000Z
|
2021-02-12T10:25:28.000Z
|
online/migrations/0001_initial.py
|
neosergio/hackatrix-api
|
27f0180415efa97bd7345d100b314d8807486b67
|
[
"Apache-2.0"
] | 7
|
2020-02-21T00:53:38.000Z
|
2022-02-10T12:22:53.000Z
|
online/migrations/0001_initial.py
|
neosergio/hackatrix-api
|
27f0180415efa97bd7345d100b314d8807486b67
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 2.2.10 on 2020-04-28 15:53
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='EvaluationCommittee',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('is_evaluation_closed', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='Team',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('project_description', models.TextField()),
('project', models.CharField(max_length=100)),
('total_score', models.FloatField(default=0)),
('evaluation_committee', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='online.EvaluationCommittee')),
],
),
migrations.CreateModel(
name='TeamMember',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('surname', models.CharField(max_length=100)),
('email', models.EmailField(max_length=254, unique=True)),
('team', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online.Team')),
],
options={
'verbose_name': 'team member',
'verbose_name_plural': 'team members',
'ordering': ['email'],
},
),
migrations.CreateModel(
name='Evaluator',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('evaluation_committee', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online.EvaluationCommittee')),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Evaluation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comment', models.TextField(blank=True, null=True)),
('total_score', models.FloatField(default=0)),
('team', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online.Team')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online.Evaluator')),
],
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.TextField()),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('team', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online.Team')),
],
),
migrations.CreateModel(
name='CategoryScore',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('percentage', models.FloatField()),
('is_committee_score', models.BooleanField(default=False)),
('score', models.FloatField(default=0)),
('evaluation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='online.Evaluation')),
],
),
]
| 46.7
| 161
| 0.583155
|
a0e6c5fe18251081a220967a19c7569cd676c1b8
| 3,397
|
py
|
Python
|
nemo/collections/nlp/data/text_normalization/constants.py
|
Vishaal-MK/NeMo
|
e842e2730b02539aba0860d20e7d6771d7dd07d2
|
[
"Apache-2.0"
] | null | null | null |
nemo/collections/nlp/data/text_normalization/constants.py
|
Vishaal-MK/NeMo
|
e842e2730b02539aba0860d20e7d6771d7dd07d2
|
[
"Apache-2.0"
] | null | null | null |
nemo/collections/nlp/data/text_normalization/constants.py
|
Vishaal-MK/NeMo
|
e842e2730b02539aba0860d20e7d6771d7dd07d2
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
DECODE_CTX_SIZE = 3 # the size of the input context to be provided to the DuplexDecoderModel
LABEL_PAD_TOKEN_ID = -100
# Split names
TRAIN, DEV, TEST = 'train', 'dev', 'test'
SPLIT_NAMES = [TRAIN, DEV, TEST]
# Languages
ENGLISH = 'en'
RUSSIAN = 'ru'
GERMAN = 'de'
MULTILINGUAL = 'multilingual'
SUPPORTED_LANGS = [ENGLISH, RUSSIAN, GERMAN, MULTILINGUAL]
# Task Prefixes
ITN_TASK = 0
TN_TASK = 1
ITN_PREFIX = str(ITN_TASK)
TN_PREFIX = str(TN_TASK)
# Tagger Labels Prefixes
B_PREFIX = 'B-' # Denote beginning
I_PREFIX = 'I-' # Denote middle
TAGGER_LABELS_PREFIXES = [B_PREFIX, I_PREFIX]
# Modes
TN_MODE = 'tn'
ITN_MODE = 'itn'
JOINT_MODE = 'joint'
MODES = [TN_MODE, ITN_MODE, JOINT_MODE]
TASK_ID_TO_MODE = {ITN_TASK: ITN_MODE, TN_TASK: TN_MODE}
MODE_TO_TASK_ID = {v: k for k, v in TASK_ID_TO_MODE.items()}
# Instance Directions
INST_BACKWARD = 'BACKWARD'
INST_FORWARD = 'FORWARD'
INST_DIRECTIONS = [INST_BACKWARD, INST_FORWARD]
DIRECTIONS_TO_ID = {INST_BACKWARD: ITN_TASK, INST_FORWARD: TN_TASK}
DIRECTIONS_ID_TO_NAME = {ITN_TASK: INST_BACKWARD, TN_TASK: INST_FORWARD}
# TAGS
SAME_TAG = 'SAME' # Tag indicates that a token can be kept the same without any further transformation
TASK_TAG = 'TASK' # Tag indicates that a token belongs to a task prefix (the prefix indicates whether the current task is TN or ITN)
PUNCT_TAG = 'PUNCT' # Tag indicates that a token is a punctuation
TRANSFORM_TAG = 'TRANSFORM' # Tag indicates that a token needs to be transformed by the decoder
ALL_TAGS = [TASK_TAG, SAME_TAG, TRANSFORM_TAG]
# ALL_TAG_LABELS
ALL_TAG_LABELS = []
for prefix in TAGGER_LABELS_PREFIXES:
for tag in ALL_TAGS:
ALL_TAG_LABELS.append(prefix + tag)
ALL_TAG_LABELS.sort()
LABEL_IDS = {l: idx for idx, l in enumerate(ALL_TAG_LABELS)}
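# For reference, the loop above yields the following labels and ids (derived directly from
# ALL_TAGS and TAGGER_LABELS_PREFIXES defined above):
#   ALL_TAG_LABELS = ['B-SAME', 'B-TASK', 'B-TRANSFORM', 'I-SAME', 'I-TASK', 'I-TRANSFORM']
#   LABEL_IDS      = {'B-SAME': 0, 'B-TASK': 1, 'B-TRANSFORM': 2, 'I-SAME': 3, 'I-TASK': 4, 'I-TRANSFORM': 5}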
# Special Words
SIL_WORD = 'sil'
SELF_WORD = '<self>'
SPECIAL_WORDS = [SIL_WORD, SELF_WORD]
# Mappings for Greek Letters (English)
EN_GREEK_TO_SPOKEN = {
'Τ': 'tau',
'Ο': 'omicron',
'Δ': 'delta',
'Η': 'eta',
'Κ': 'kappa',
'Ι': 'iota',
'Θ': 'theta',
'Α': 'alpha',
'Σ': 'sigma',
'Υ': 'upsilon',
'Μ': 'mu',
'Ε': 'epsilon',
'Χ': 'chi',
'Π': 'pi',
'Ν': 'nu',
'Λ': 'lambda',
'Γ': 'gamma',
'Β': 'beta',
'Ρ': 'rho',
'τ': 'tau',
'υ': 'upsilon',
'μ': 'mu',
'φ': 'phi',
'α': 'alpha',
'λ': 'lambda',
'ι': 'iota',
'ς': 'sigma',
'ο': 'omicron',
'σ': 'sigma',
'η': 'eta',
'π': 'pi',
'ν': 'nu',
'γ': 'gamma',
'κ': 'kappa',
'ε': 'epsilon',
'β': 'beta',
'ρ': 'rho',
'ω': 'omega',
'χ': 'chi',
}
EN_SPOKEN_TO_GREEK = {v: k for k, v in EN_GREEK_TO_SPOKEN.items()}
# IDs for special tokens for encoding inputs of the decoder models
EXTRA_ID_0 = '<extra_id_0>'
EXTRA_ID_1 = '<extra_id_1>'
| 27.617886
| 133
| 0.663527
|
ef5a761a3fa409ad1f4cf5160883d55714198729
| 1,064
|
py
|
Python
|
scripts/addons/animation_nodes/nodes/matrix/transform_matrix.py
|
Tilapiatsu/blender-custom_conf
|
05592fedf74e4b7075a6228b8448a5cda10f7753
|
[
"MIT"
] | 2
|
2020-04-16T22:12:40.000Z
|
2022-01-22T17:18:45.000Z
|
scripts/addons/animation_nodes/nodes/matrix/transform_matrix.py
|
Tilapiatsu/blender-custom_conf
|
05592fedf74e4b7075a6228b8448a5cda10f7753
|
[
"MIT"
] | null | null | null |
scripts/addons/animation_nodes/nodes/matrix/transform_matrix.py
|
Tilapiatsu/blender-custom_conf
|
05592fedf74e4b7075a6228b8448a5cda10f7753
|
[
"MIT"
] | 2
|
2019-05-16T04:01:09.000Z
|
2020-08-25T11:42:26.000Z
|
import bpy
from ... base_types import AnimationNode, VectorizedSocket
from . c_utils import multiplyMatrixWithList
class TransformMatrixNode(bpy.types.Node, AnimationNode):
bl_idname = "an_TransformMatrixNode"
bl_label = "Transform Matrix"
useMatrixList: VectorizedSocket.newProperty()
def create(self):
self.newInput(VectorizedSocket("Matrix", "useMatrixList",
("Matrix", "inMatrix"), ("Matrices", "inMatrices")))
self.newInput("Matrix", "Transformation", "transformation")
self.newOutput(VectorizedSocket("Matrix", "useMatrixList",
("Matrix", "outMatrix"), ("Matrices", "outMatrices")))
def getExecutionFunctionName(self):
if self.useMatrixList:
return "execute_MatrixList"
else:
return "execute_Matrix"
def execute_Matrix(self, inMatrix, transformation):
return transformation @ inMatrix
def execute_MatrixList(self, inMatrices, _transformation):
return multiplyMatrixWithList(inMatrices, _transformation, type = "LEFT")
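# Minimal sketch of the underlying operation outside the node tree (an illustration only; it
# assumes Blender's mathutils module, not this add-on's sockets):
#
#     from mathutils import Matrix
#     transformation = Matrix.Translation((1.0, 0.0, 0.0))
#     out = transformation @ Matrix.Identity(4)   # same left-multiplication as execute_Matrix()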
| 34.322581
| 81
| 0.695489
|
94130d4cabe324f7da8b263102457c8b4c245b99
| 339
|
py
|
Python
|
osf/migrations/0099_merge_20180426_0930.py
|
gaybro8777/osf.io
|
30408511510a40bc393565817b343ef5fd76ab14
|
[
"Apache-2.0"
] | 628
|
2015-01-15T04:33:22.000Z
|
2022-03-30T06:40:10.000Z
|
osf/migrations/0099_merge_20180426_0930.py
|
gaybro8777/osf.io
|
30408511510a40bc393565817b343ef5fd76ab14
|
[
"Apache-2.0"
] | 4,712
|
2015-01-02T01:41:53.000Z
|
2022-03-30T14:18:40.000Z
|
osf/migrations/0099_merge_20180426_0930.py
|
Johnetordoff/osf.io
|
de10bf249c46cede04c78f7e6f7e352c69e6e6b5
|
[
"Apache-2.0"
] | 371
|
2015-01-12T16:14:08.000Z
|
2022-03-31T18:58:29.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-26 14:30
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0098_merge_20180416_1807'),
('osf', '0096_add_provider_doi_prefixes'),
]
operations = [
]
| 19.941176
| 50
| 0.663717
|
d2fa0cdff4e905d8ede65c3a590886400be92b88
| 759
|
py
|
Python
|
usaspending_api/common/zip.py
|
g4brielvs/usaspending-api
|
bae7da2c204937ec1cdf75c052405b13145728d5
|
[
"CC0-1.0"
] | 1
|
2020-08-14T04:14:32.000Z
|
2020-08-14T04:14:32.000Z
|
usaspending_api/common/zip.py
|
g4brielvs/usaspending-api
|
bae7da2c204937ec1cdf75c052405b13145728d5
|
[
"CC0-1.0"
] | null | null | null |
usaspending_api/common/zip.py
|
g4brielvs/usaspending-api
|
bae7da2c204937ec1cdf75c052405b13145728d5
|
[
"CC0-1.0"
] | null | null | null |
from zipfile import ZipFile
def extract_single_file_zip(zip_file_path, destination_directory_path):
"""
    Accepts a zip file path and a destination directory path, then extracts a single file from the zip
    archive into the destination directory. The ZIP archive must contain one and only one file.
Returns the file path of the extracted file.
"""
with ZipFile(zip_file_path) as zip_file:
zip_files = zip_file.namelist()
file_count = len(zip_files)
if file_count < 1:
raise RuntimeError("No files found in zip archive")
if file_count > 1:
raise NotImplementedError("Expected no more than one file in zip archive")
return zip_file.extract(zip_files[0], path=destination_directory_path)
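# Minimal usage sketch (hypothetical paths; the archive must hold exactly one file):
#
#     extracted = extract_single_file_zip("/tmp/report.zip", "/tmp/extracted")
#     print(extracted)  # e.g. "/tmp/extracted/report.csv"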
| 39.947368
| 100
| 0.711462
|
aa42e1d37a97d9b06309b795e6be2fb88203111c
| 2,221
|
py
|
Python
|
nnrl/nn/init.py
|
0xangelo/nnrl
|
559d36b6d17425d9d657c1dffbcf070955f73afb
|
[
"MIT"
] | null | null | null |
nnrl/nn/init.py
|
0xangelo/nnrl
|
559d36b6d17425d9d657c1dffbcf070955f73afb
|
[
"MIT"
] | 7
|
2021-11-01T12:03:41.000Z
|
2022-01-08T14:55:49.000Z
|
nnrl/nn/init.py
|
angelolovatto/nnrl
|
c925af1c6ecc6e2e999b782935f7e2c7dee1ba81
|
[
"MIT"
] | null | null | null |
"""Utilities for module initialization."""
import functools
import inspect
from typing import Callable, Optional, Union
from torch import Tensor, nn
def get_initializer(name: Optional[str]) -> Callable[[Tensor], None]:
"""Return initializer function given its name.
Arguments:
name: The initializer function's name. If None, returns a no-op callable
"""
if name is None:
return lambda _: None
name_ = name + "_"
if name in dir(nn.init) and name_ in dir(nn.init):
func = getattr(nn.init, name_)
return func
raise ValueError(f"Couldn't find initializer with name '{name}'")
NONLINEARITY_MAP = {
"Sigmoid": "sigmoid",
"Tanh": "tanh",
"ReLU": "relu",
"ELU": "relu",
"LeakyReLU": "leaky_relu",
}
def initialize_(
name: Optional[str] = None, activation: Union[str, dict] = None, **options
) -> Callable[[nn.Module], None]:
"""Return a callable to apply an initializer with the given name and options.
If `gain` is part of the initializer's argspec and is not specified in options,
the recommended value from `torch.nn.init.calculate_gain` is used.
Arguments:
name: Initializer function name
activation: Optional specification of the activation function that
follows linear layers
**options: Keyword arguments to pass to the initializer
Returns:
A callable to be used with `nn.Module.apply`.
"""
initializer = get_initializer(name)
if isinstance(activation, dict):
        # Read the nested options before collapsing the spec to its name string,
        # otherwise .get() would be called on a str and raise AttributeError
        options.update(activation.get("options", {}))
        activation = activation["name"]
if (
activation in NONLINEARITY_MAP
and "gain" not in options
and "gain" in inspect.signature(initializer).parameters
):
recommended_gain = nn.init.calculate_gain(
NONLINEARITY_MAP[activation], param=options.get("negative_slope")
)
options["gain"] = recommended_gain
func_ = functools.partial(initializer, **options)
def init(module):
if isinstance(module, nn.Linear):
func_(module.weight)
if module.bias is not None:
nn.init.constant_(module.bias, 0)
return init
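# Minimal usage sketch (assumes a plain torch.nn.Sequential model; "xavier_uniform" is a standard
# torch.nn.init initializer name, so the recommended tanh gain is filled in automatically):
#
#     model = nn.Sequential(nn.Linear(4, 8), nn.Tanh(), nn.Linear(8, 2))
#     model.apply(initialize_("xavier_uniform", activation="Tanh"))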
| 29.223684
| 83
| 0.649707
|
05c9f3fb21bb4334040e88b84df62a7aca97026a
| 1,121
|
py
|
Python
|
blog/code/sl01.py
|
rodluger/rodluger.github.io
|
1639d1d299901855a56415cce056b3640093ccb0
|
[
"MIT"
] | 2
|
2020-06-30T14:32:02.000Z
|
2021-03-01T07:10:28.000Z
|
blog/code/sl01.py
|
rodluger/rodluger.github.io
|
1639d1d299901855a56415cce056b3640093ccb0
|
[
"MIT"
] | 2
|
2017-09-07T20:12:55.000Z
|
2020-09-18T19:03:16.000Z
|
blog/code/sl01.py
|
rodluger/rodluger.github.io
|
1639d1d299901855a56415cce056b3640093ccb0
|
[
"MIT"
] | 4
|
2018-11-28T12:19:51.000Z
|
2022-03-08T20:03:43.000Z
|
import matplotlib.pyplot as plt
import numpy as np
from scipy.stats import beta as Beta  # Beta distribution used to draw the simulated datasets
# log_likelihood() and log_likelihood_synthetic() are assumed to be defined in the earlier snippets of this post
# True values
alpha_true = 2.0
beta_true = 0.33
# Array of alpha values to test
alpha_arr = np.linspace(1.5, 2.5, 1000)
# Let's simulate 50 different `y` datasets, each with k=1000 points
np.random.seed(0)
for k in range(50):
# Draw our data: this is what we observe
y = Beta.rvs(alpha_true, beta_true, size=1000)
# Compute the log likelihood using the true likelihood function
# Exponentiate it and divide by the evidence to get the posterior
ll = np.array([log_likelihood(alpha, beta_true, y) for alpha in alpha_arr])
ll -= np.nanmax(ll)
pdf = np.exp(ll) / np.trapz(np.exp(ll))
# Now do the same thing using the synthetic likelihood function
ll_sl = np.array(
[log_likelihood_synthetic(alpha, beta_true, y) for alpha in alpha_arr]
)
ll_sl -= np.nanmax(ll_sl)
pdf_sl = np.exp(ll_sl) / np.trapz(np.exp(ll_sl))
# Plot the two distributions
plt.plot(alpha_arr, pdf, color="C0", alpha=0.25, lw=1)
plt.plot(alpha_arr, pdf_sl, color="C1", alpha=0.25, lw=1)
plt.axvline(alpha_true, color="k", lw=1, ls="--", alpha=0.5)
| 32.028571
| 79
| 0.681534
|
a22039d76982005643bb6c34a53658b65e9595b7
| 354
|
py
|
Python
|
rllib/agents/dreamer/__init__.py
|
willfrey/ray
|
288a81b42ef0186ab4db33b30191614a7bdb69f6
|
[
"Apache-2.0"
] | 1
|
2019-06-19T02:23:43.000Z
|
2019-06-19T02:23:43.000Z
|
rllib/agents/dreamer/__init__.py
|
willfrey/ray
|
288a81b42ef0186ab4db33b30191614a7bdb69f6
|
[
"Apache-2.0"
] | 73
|
2021-09-25T07:11:39.000Z
|
2022-03-26T07:10:59.000Z
|
rllib/agents/dreamer/__init__.py
|
willfrey/ray
|
288a81b42ef0186ab4db33b30191614a7bdb69f6
|
[
"Apache-2.0"
] | 1
|
2019-09-24T16:24:49.000Z
|
2019-09-24T16:24:49.000Z
|
from ray.rllib.algorithms.dreamer.dreamer import (
DREAMERConfig,
DREAMERTrainer,
DEFAULT_CONFIG,
)
__all__ = [
"DREAMERConfig",
"DREAMERTrainer",
"DEFAULT_CONFIG",
]
from ray.rllib.utils.deprecation import deprecation_warning
deprecation_warning(
"ray.rllib.agents.dreamer", "ray.rllib.algorithms.dreamer", error=False
)
| 18.631579
| 75
| 0.731638
|
66f169c05924909072ffed20a5a4bb3117615e1a
| 1,494
|
py
|
Python
|
uavcan_gui_tool/thirdparty/pyqtgraph/widgets/PathButton.py
|
PonomarevDA/inno_uavcan_VTOL_interface
|
76bea66096f1eff72ccc8302b15b6844a66edb2d
|
[
"MIT"
] | null | null | null |
uavcan_gui_tool/thirdparty/pyqtgraph/widgets/PathButton.py
|
PonomarevDA/inno_uavcan_VTOL_interface
|
76bea66096f1eff72ccc8302b15b6844a66edb2d
|
[
"MIT"
] | null | null | null |
uavcan_gui_tool/thirdparty/pyqtgraph/widgets/PathButton.py
|
PonomarevDA/inno_uavcan_VTOL_interface
|
76bea66096f1eff72ccc8302b15b6844a66edb2d
|
[
"MIT"
] | null | null | null |
from .. import functions as fn
from ..Qt import QtGui, QtCore
__all__ = ['PathButton']
class PathButton(QtGui.QPushButton):
"""Simple PushButton extension which paints a QPainterPath on its face"""
def __init__(self, parent=None, path=None, pen='default', brush=None, size=(30, 30)):
QtGui.QPushButton.__init__(self, parent)
self.path = None
if pen == 'default':
pen = 'k'
self.setPen(pen)
self.setBrush(brush)
if path is not None:
self.setPath(path)
if size is not None:
self.setFixedWidth(size[0])
self.setFixedHeight(size[1])
def setBrush(self, brush):
self.brush = fn.mkBrush(brush)
def setPen(self, *args, **kwargs):
self.pen = fn.mkPen(*args, **kwargs)
def setPath(self, path):
self.path = path
self.update()
def paintEvent(self, ev):
QtGui.QPushButton.paintEvent(self, ev)
margin = 7
geom = QtCore.QRectF(0, 0, self.width(), self.height()).adjusted(margin, margin, -margin, -margin)
rect = self.path.boundingRect()
scale = min(geom.width() / float(rect.width()), geom.height() / float(rect.height()))
p = QtGui.QPainter(self)
p.setRenderHint(p.Antialiasing)
p.translate(geom.center())
p.scale(scale, scale)
p.translate(-rect.center())
p.setPen(self.pen)
p.setBrush(self.brush)
p.drawPath(self.path)
p.end()
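# Minimal usage sketch (hypothetical; requires a running QApplication):
#
#     path = QtGui.QPainterPath()
#     path.moveTo(0, 0); path.lineTo(20, 10); path.lineTo(0, 20); path.closeSubpath()
#     btn = PathButton(path=path, pen='k', brush='g', size=(30, 30))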
| 31.125
| 106
| 0.592369
|
d219aec9c6c6b46d61cf5c219d90755dd4dae112
| 386
|
py
|
Python
|
users/migrations/0004_profile_status.py
|
dizzyplay/djangoboard
|
99eb7cedca2772d78577974051b78dd522b90bd3
|
[
"MIT"
] | null | null | null |
users/migrations/0004_profile_status.py
|
dizzyplay/djangoboard
|
99eb7cedca2772d78577974051b78dd522b90bd3
|
[
"MIT"
] | 6
|
2020-02-11T23:38:07.000Z
|
2021-09-08T00:42:36.000Z
|
users/migrations/0004_profile_status.py
|
dizzyplay/djangoboard
|
99eb7cedca2772d78577974051b78dd522b90bd3
|
[
"MIT"
] | 1
|
2019-03-21T17:43:42.000Z
|
2019-03-21T17:43:42.000Z
|
# Generated by Django 2.1.2 on 2018-11-13 14:11
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0003_auto_20181113_2112'),
]
operations = [
migrations.AddField(
model_name='profile',
name='status',
field=models.BooleanField(default=False),
),
]
| 20.315789
| 53
| 0.598446
|
4cd714fe71c8a4a589db895b747713b2d2d444a0
| 115
|
py
|
Python
|
compressible_sr/problems/__init__.py
|
zooechiu/pyro2
|
51874476e9c3c3c412c66850ab819ca70af0b20c
|
[
"BSD-3-Clause"
] | 151
|
2018-08-14T12:52:22.000Z
|
2022-03-29T07:57:01.000Z
|
compressible_sr/problems/__init__.py
|
gfjykldd/pyro2
|
b0ca4aa7b1b0f0d445c6a8d0ab63fcc0bc8a431c
|
[
"BSD-3-Clause"
] | 40
|
2015-03-25T15:45:44.000Z
|
2018-07-30T18:48:47.000Z
|
compressible_sr/problems/__init__.py
|
gfjykldd/pyro2
|
b0ca4aa7b1b0f0d445c6a8d0ab63fcc0bc8a431c
|
[
"BSD-3-Clause"
] | 56
|
2018-10-10T16:54:59.000Z
|
2022-02-06T08:48:52.000Z
|
__all__ = ['acoustic_pulse', 'advect', 'bubble', 'hse', 'kh', 'logo', 'quad', 'rt', 'rt2', 'sedov', 'sod', 'test']
| 57.5
| 114
| 0.547826
|
898bbf833b6abc7181ce6173fac8c1816a50d18d
| 7,696
|
py
|
Python
|
tests/common/checks/test_base_check_registry.py
|
antonblr/checkov
|
9415c6593c537945c08f7a19f28bdd8b96966f67
|
[
"Apache-2.0"
] | 3
|
2021-04-19T17:17:21.000Z
|
2021-09-06T06:31:09.000Z
|
tests/common/checks/test_base_check_registry.py
|
antonblr/checkov
|
9415c6593c537945c08f7a19f28bdd8b96966f67
|
[
"Apache-2.0"
] | 16
|
2021-03-09T07:38:38.000Z
|
2021-06-09T03:53:55.000Z
|
tests/common/checks/test_base_check_registry.py
|
antonblr/checkov
|
9415c6593c537945c08f7a19f28bdd8b96966f67
|
[
"Apache-2.0"
] | 1
|
2021-03-07T07:23:39.000Z
|
2021-03-07T07:23:39.000Z
|
import unittest
from checkov.common.checks.base_check import BaseCheck
from checkov.common.checks.base_check_registry import BaseCheckRegistry
class TestCheck(BaseCheck):
def __init__(self, *supported_entities, id="CKV_T_1"):
name = "Example check"
categories = []
supported_entities = list(supported_entities)
block_type = "module"
super().__init__(name=name, id=id, categories=categories, supported_entities=supported_entities,
block_type=block_type)
def scan_entity_conf(self, conf, entity_type):
pass
# noinspection DuplicatedCode
class TestRunnerRegistry(unittest.TestCase):
def test_add_non_wildcard(self):
registry = BaseCheckRegistry()
resource_1_check = TestCheck("resource_1")
registry.register(resource_1_check)
checks = registry.get_checks("resource_1")
self.assertEqual(1, len(checks))
self.assertEqual(resource_1_check, checks[0])
self.assertIn("resource_1", registry.checks)
self.assertNotIn("resource_1", registry.wildcard_checks)
def test_add_wildcard(self):
registry = BaseCheckRegistry()
resource_s_check = TestCheck("resource_*")
registry.register(resource_s_check)
checks = registry.get_checks("resource_*")
self.assertEqual(1, len(checks))
self.assertEqual(resource_s_check, checks[0])
self.assertNotIn("resource_*", registry.checks)
self.assertIn("resource_*", registry.wildcard_checks)
def test__is_wildcard(self):
self.assertFalse(BaseCheckRegistry._is_wildcard(""))
self.assertFalse(BaseCheckRegistry._is_wildcard("resource"))
self.assertFalse(BaseCheckRegistry._is_wildcard("module"))
self.assertFalse(BaseCheckRegistry._is_wildcard("aws_s3_bucket"))
self.assertTrue(BaseCheckRegistry._is_wildcard("aws_*"))
self.assertTrue(BaseCheckRegistry._is_wildcard("*"))
self.assertTrue(BaseCheckRegistry._is_wildcard("aws_[^0-9]"))
def test_get_check_by_id(self):
registry = BaseCheckRegistry()
resource_1_check = TestCheck("resource_1", id="CKV_T_1")
resource_2_check = TestCheck("resource_2", id="CKV_T_2")
resource_as_check = TestCheck("resource_a*", id="CKV_T_3")
resource_bs_check = TestCheck("resource_b*", id="CKV_T_4")
registry.register(resource_1_check)
registry.register(resource_2_check)
registry.register(resource_as_check)
registry.register(resource_bs_check)
self.assertEqual(resource_1_check, registry.get_check_by_id("CKV_T_1"))
self.assertEqual(resource_2_check, registry.get_check_by_id("CKV_T_2"))
self.assertEqual(resource_as_check, registry.get_check_by_id("CKV_T_3"))
self.assertEqual(resource_bs_check, registry.get_check_by_id("CKV_T_4"))
self.assertIsNone(registry.get_check_by_id("CKV_T_5"))
def test_get_check_no_wildcard(self):
registry = BaseCheckRegistry()
resource_1_check = TestCheck("resource_1", id="CKV_T_1")
resource_2_check1 = TestCheck("resource_2", id="CKV_T_2")
resource_2_check2 = TestCheck("resource_2", id="CKV_T_3")
registry.register(resource_1_check)
registry.register(resource_2_check1)
registry.register(resource_2_check2)
resource_1_checks = registry.get_checks("resource_1")
self.assertEqual(1, len(resource_1_checks))
self.assertEqual(resource_1_check, resource_1_checks[0])
resource_2_checks = registry.get_checks("resource_2")
self.assertEqual(2, len(resource_2_checks))
self.assertIn(resource_2_check1, resource_2_checks)
self.assertIn(resource_2_check2, resource_2_checks)
self.assertEqual(0, len(registry.get_checks("resource")))
self.assertEqual(0, len(registry.get_checks("resource_10")))
def test_get_check_wildcard(self):
registry = BaseCheckRegistry()
resource_s_check = TestCheck("resource_*", id="CKV_T_1")
resource_as_check = TestCheck("resource_a*", id="CKV_T_2")
s_check = TestCheck("*", id="CKV_T_3")
s_2_check = TestCheck("*_2", id="CKV_T_4")
registry.register(resource_s_check)
registry.register(resource_as_check)
registry.register(s_check)
registry.register(s_2_check)
resource_1_checks = registry.get_checks("resource_1")
self.assertEqual(2, len(resource_1_checks))
self.assertIn(s_check, resource_1_checks)
self.assertIn(resource_s_check, resource_1_checks)
resource_2_checks = registry.get_checks("resource_2")
self.assertEqual(3, len(resource_2_checks))
self.assertIn(s_check, resource_2_checks)
self.assertIn(s_2_check, resource_2_checks)
self.assertIn(resource_s_check, resource_2_checks)
resource__checks = registry.get_checks("resource_")
self.assertEqual(2, len(resource__checks))
self.assertIn(s_check, resource__checks)
self.assertIn(resource_s_check, resource__checks)
resource_abc_checks = registry.get_checks("resource_abc")
self.assertEqual(3, len(resource_abc_checks))
self.assertIn(s_check, resource_abc_checks)
self.assertIn(resource_s_check, resource_abc_checks)
self.assertIn(resource_as_check, resource_abc_checks)
r_checks = registry.get_checks("r")
self.assertEqual(1, len(r_checks))
self.assertIn(s_check, r_checks)
resource_checks = registry.get_checks("resource")
self.assertEqual(1, len(resource_checks))
self.assertIn(s_check, resource_checks)
resource_checks = registry.get_checks("resource_ABC")
self.assertEqual(2, len(resource_checks))
self.assertIn(s_check, resource_checks)
self.assertIn(resource_s_check, resource_checks)
def test_get_check_mixed(self):
registry = BaseCheckRegistry()
resource_1_check = TestCheck("resource_1", id="CKV_T_1")
resource_2_check = TestCheck("resource_2", id="CKV_T_2")
resource_s_check = TestCheck("resource_*", id="CKV_T_4")
resource_as_check = TestCheck("resource_a*", id="CKV_T_3")
s_check = TestCheck("*", id="CKV_T_4")
s_2_check = TestCheck("*_2", id="CKV_T_5")
registry.register(resource_1_check)
registry.register(resource_2_check)
registry.register(resource_s_check)
registry.register(resource_as_check)
registry.register(s_check)
registry.register(s_2_check)
resource_1_checks = registry.get_checks("resource_1")
self.assertEqual(3, len(resource_1_checks))
self.assertIn(s_check, resource_1_checks)
self.assertIn(resource_1_check, resource_1_checks)
self.assertIn(resource_s_check, resource_1_checks)
resource_10_checks = registry.get_checks("resource_10")
self.assertEqual(2, len(resource_10_checks))
self.assertIn(s_check, resource_10_checks)
self.assertIn(resource_s_check, resource_10_checks)
resource_2_checks = registry.get_checks("resource_2")
self.assertEqual(4, len(resource_2_checks))
self.assertIn(s_check, resource_2_checks)
self.assertIn(s_2_check, resource_2_checks)
self.assertIn(resource_2_check, resource_2_checks)
self.assertIn(resource_s_check, resource_2_checks)
resource__checks = registry.get_checks("resource_")
self.assertEqual(2, len(resource__checks))
self.assertIn(s_check, resource__checks)
self.assertIn(resource_s_check, resource__checks)
if __name__ == '__main__':
unittest.main()
| 42.755556
| 104
| 0.706861
|
ad1ad77bbbdde292239d8235d09157585fff37b8
| 1,580
|
py
|
Python
|
SDK/tld-0.7.1/setup.py
|
Gr1ph00n/staticwebanalyzer
|
8bf6337a77192b85913d75778830ccbb9006081f
|
[
"MIT"
] | null | null | null |
SDK/tld-0.7.1/setup.py
|
Gr1ph00n/staticwebanalyzer
|
8bf6337a77192b85913d75778830ccbb9006081f
|
[
"MIT"
] | null | null | null |
SDK/tld-0.7.1/setup.py
|
Gr1ph00n/staticwebanalyzer
|
8bf6337a77192b85913d75778830ccbb9006081f
|
[
"MIT"
] | null | null | null |
import os
from setuptools import setup, find_packages
try:
readme = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
except Exception:
readme = ''
version = '0.7.1'
data_dir = "src/tld/res"
data = [os.path.join(data_dir, f) for f in os.listdir(data_dir)]
setup(
name = 'tld',
version = version,
description = ("Extracts the top level domain (TLD) from the URL given."),
long_description = readme,
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Development Status :: 5 - Production/Stable",
"Topic :: Internet",
"License :: OSI Approved :: Mozilla Public License 1.1 (MPL 1.1)",
"License :: OSI Approved :: GNU General Public License v2 (GPLv2)",
"License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)",
],
keywords = 'tld, top level domain names, python',
author = 'Artur Barseghyan',
author_email = 'artur.barseghyan@gmail.com',
url = 'https://github.com/barseghyanartur/tld',
package_dir = {'':'src'},
packages = find_packages(where='./src'),
package_data = {'tld': data},
include_package_data = True,
license = 'MPL 1.1/GPL 2.0/LGPL 2.1',
install_requires = [
'six>=1.4'
]
)
| 33.617021
| 93
| 0.614557
|
726f4c856319c949e18456be548d9ffd015cc6a6
| 7,861
|
py
|
Python
|
problems/euler013.py
|
branning/euler
|
b9f53cdca4b246fab45a8ea4a19f432e7f733594
|
[
"MIT"
] | null | null | null |
problems/euler013.py
|
branning/euler
|
b9f53cdca4b246fab45a8ea4a19f432e7f733594
|
[
"MIT"
] | null | null | null |
problems/euler013.py
|
branning/euler
|
b9f53cdca4b246fab45a8ea4a19f432e7f733594
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
numbers_str = ["37107287533902102798797998220837590246510135740250",
"46376937677490009712648124896970078050417018260538",
"74324986199524741059474233309513058123726617309629",
"91942213363574161572522430563301811072406154908250",
"23067588207539346171171980310421047513778063246676",
"89261670696623633820136378418383684178734361726757",
"28112879812849979408065481931592621691275889832738",
"44274228917432520321923589422876796487670272189318",
"47451445736001306439091167216856844588711603153276",
"70386486105843025439939619828917593665686757934951",
"62176457141856560629502157223196586755079324193331",
"64906352462741904929101432445813822663347944758178",
"92575867718337217661963751590579239728245598838407",
"58203565325359399008402633568948830189458628227828",
"80181199384826282014278194139940567587151170094390",
"35398664372827112653829987240784473053190104293586",
"86515506006295864861532075273371959191420517255829",
"71693888707715466499115593487603532921714970056938",
"54370070576826684624621495650076471787294438377604",
"53282654108756828443191190634694037855217779295145",
"36123272525000296071075082563815656710885258350721",
"45876576172410976447339110607218265236877223636045",
"17423706905851860660448207621209813287860733969412",
"81142660418086830619328460811191061556940512689692",
"51934325451728388641918047049293215058642563049483",
"62467221648435076201727918039944693004732956340691",
"15732444386908125794514089057706229429197107928209",
"55037687525678773091862540744969844508330393682126",
"18336384825330154686196124348767681297534375946515",
"80386287592878490201521685554828717201219257766954",
"78182833757993103614740356856449095527097864797581",
"16726320100436897842553539920931837441497806860984",
"48403098129077791799088218795327364475675590848030",
"87086987551392711854517078544161852424320693150332",
"59959406895756536782107074926966537676326235447210",
"69793950679652694742597709739166693763042633987085",
"41052684708299085211399427365734116182760315001271",
"65378607361501080857009149939512557028198746004375",
"35829035317434717326932123578154982629742552737307",
"94953759765105305946966067683156574377167401875275",
"88902802571733229619176668713819931811048770190271",
"25267680276078003013678680992525463401061632866526",
"36270218540497705585629946580636237993140746255962",
"24074486908231174977792365466257246923322810917141",
"91430288197103288597806669760892938638285025333403",
"34413065578016127815921815005561868836468420090470",
"23053081172816430487623791969842487255036638784583",
"11487696932154902810424020138335124462181441773470",
"63783299490636259666498587618221225225512486764533",
"67720186971698544312419572409913959008952310058822",
"95548255300263520781532296796249481641953868218774",
"76085327132285723110424803456124867697064507995236",
"37774242535411291684276865538926205024910326572967",
"23701913275725675285653248258265463092207058596522",
"29798860272258331913126375147341994889534765745501",
"18495701454879288984856827726077713721403798879715",
"38298203783031473527721580348144513491373226651381",
"34829543829199918180278916522431027392251122869539",
"40957953066405232632538044100059654939159879593635",
"29746152185502371307642255121183693803580388584903",
"41698116222072977186158236678424689157993532961922",
"62467957194401269043877107275048102390895523597457",
"23189706772547915061505504953922979530901129967519",
"86188088225875314529584099251203829009407770775672",
"11306739708304724483816533873502340845647058077308",
"82959174767140363198008187129011875491310547126581",
"97623331044818386269515456334926366572897563400500",
"42846280183517070527831839425882145521227251250327",
"55121603546981200581762165212827652751691296897789",
"32238195734329339946437501907836945765883352399886",
"75506164965184775180738168837861091527357929701337",
"62177842752192623401942399639168044983993173312731",
"32924185707147349566916674687634660915035914677504",
"99518671430235219628894890102423325116913619626622",
"73267460800591547471830798392868535206946944540724",
"76841822524674417161514036427982273348055556214818",
"97142617910342598647204516893989422179826088076852",
"87783646182799346313767754307809363333018982642090",
"10848802521674670883215120185883543223812876952786",
"71329612474782464538636993009049310363619763878039",
"62184073572399794223406235393808339651327408011116",
"66627891981488087797941876876144230030984490851411",
"60661826293682836764744779239180335110989069790714",
"85786944089552990653640447425576083659976645795096",
"66024396409905389607120198219976047599490197230297",
"64913982680032973156037120041377903785566085089252",
"16730939319872750275468906903707539413042652315011",
"94809377245048795150954100921645863754710598436791",
"78639167021187492431995700641917969777599028300699",
"15368713711936614952811305876380278410754449733078",
"40789923115535562561142322423255033685442488917353",
"44889911501440648020369068063960672322193204149535",
"41503128880339536053299340368006977710650566631954",
"81234880673210146739058568557934581403627822703280",
"82616570773948327592232845941706525094512325230608",
"22918802058777319719839450180888072429661980811197",
"77158542502016545090413245809786882778948721859617",
"72107838435069186155435662884062257473692284509516",
"20849603980134001723930671666823555245252804609722",
"53503534226472524250874054075591789781264330331690"]
def sum_str(numbers):
sum_str = []
carry = 0
    for digit in range(-1, -1 - len(numbers[0]), -1):
        digit_sum = carry
        digit_sum += sum(int(number[digit]) for number in numbers)
        sum_str.append(str(digit_sum)[-1])
        carry = digit_sum // 10  # integer division keeps the carry an int under Python 3
carry_str = str(carry)
for digit in range(-1, -1-1*len(carry_str), -1):
sum_str.append(carry_str[digit])
sum_str.reverse()
return sum_str
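# Worked example of the column-wise addition above (hypothetical two-number case):
#   "95" + "87": units column 5 + 7 = 12 -> keep "2", carry 1;
#   tens column 1 + 9 + 8 = 18 -> keep "8", carry 1; the leftover carry "1" yields "182".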
if __name__ == "__main__":
    debugging = False
    n = 10
    total = sum_str(numbers_str)
    if debugging:
        print(''.join(total))
        print("first {} characters: {}".format(n, ''.join(total[0:n])))
    else:
        print(''.join(total[0:n]))
| 60.007634
| 75
| 0.691897
|
0805d729a4459aaf1a4406e550463a0fea734ab2
| 2,711
|
py
|
Python
|
preprocessing_tools/smarteye_gaze.py
|
tkerwin/pydre
|
a100779801ffdc9855f09328bd28bf79ab6e679b
|
[
"Apache-2.0"
] | null | null | null |
preprocessing_tools/smarteye_gaze.py
|
tkerwin/pydre
|
a100779801ffdc9855f09328bd28bf79ab6e679b
|
[
"Apache-2.0"
] | null | null | null |
preprocessing_tools/smarteye_gaze.py
|
tkerwin/pydre
|
a100779801ffdc9855f09328bd28bf79ab6e679b
|
[
"Apache-2.0"
] | null | null | null |
import argparse
import csv
import os
import sys
import glob
import numpy as np
import re
import pandas
from pandas.api.types import CategoricalDtype
def summarizeGazeBlock(block, timeColName="VidTime", gazeColName="ESTIMATED_CLOSEST_WORLD_INTERSECTION"):
# find the gaze sequence in block, which has a vidTime column and a gaze column
block = pandas.DataFrame(block, columns=([timeColName, gazeColName]))
cat_type = CategoricalDtype(categories=['None', 'car.dashPlane', 'car.WindScreen'])
block[gazeColName] = block[gazeColName].astype(cat_type)
block[timeColName] = pandas.to_timedelta(block[timeColName], unit="s")
block.set_index(timeColName, inplace=True)
# filter out noise from the gaze column
# SAE J2396 defines fixations as at least 0.2 seconds,
min_delta = pandas.to_timedelta(0.2, unit='s')
# so we ignore changes in gaze that are less than that
# find list of runs
block['gazenum'] = (block[gazeColName].shift(1) != block[gazeColName]).astype(int).cumsum()
durations = block.reset_index().groupby('gazenum').max() - block.reset_index().groupby('gazenum').min()
n = block['gazenum'].max()
block = block.reset_index()
    for x in range(1, n + 1):
        # gazenum labels runs starting at 1, so index durations by label to keep runs and durations aligned
        if durations.loc[x, timeColName] < min_delta:
            block.loc[block['gazenum'] == x, gazeColName] = np.nan
block.fillna(method='bfill', inplace=True)
return block
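# Minimal usage sketch (hypothetical frame; real input comes from the SmartEye .dat export):
#
#     raw = pandas.DataFrame({
#         "VidTime": [0.0, 0.1, 0.2, 0.3, 0.4, 0.5],
#         "ESTIMATED_CLOSEST_WORLD_INTERSECTION": ["car.WindScreen"] * 3 + ["car.dashPlane"] * 3,
#     })
#     fixations = summarizeGazeBlock(raw)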
def runFiles(files):
total_results = []
for filename in files:
        d = pandas.read_csv(filename, sep=r'\s+', na_values='.')
        datafile_re = re.compile(r"([^_]+)_Sub_(\d+)_Drive_(\d+)(?:.*).dat")
match = datafile_re.search(filename)
experiment_name, subject_id, drive_id = match.groups()
print("Running subject {}, drive {}".format(subject_id, drive_id))
        results = ecoCar(d)  # ecoCar() is assumed to be provided elsewhere in the preprocessing tools
print("Got {} results.".format(len(results)))
for (startTime, warning, rtTime) in results:
total_results.append((subject_id, warning, rtTime, startTime))
return total_results
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--file", type=str, help="input dat file (glob patterns allowed)", required=True)
    parser.add_argument("-s", "--start", type=float, help="start video timecode", required=True)
    parser.add_argument("-e", "--end", type=float, help="end video timecode", required=True)
    args = parser.parse_args()
    file_list = glob.glob(args.file)  # build the file list from the --file argument; it was previously undefined
    print(file_list)
    results = runFiles(file_list)
with open('tasks_out.csv', 'w', newline='') as csvfile:
writer = csv.writer(csvfile, delimiter=',')
writer.writerow(("subject", "warning", "reactionTime", "startTime"))
writer.writerows(results)
if __name__ == "__main__":
main()
| 38.183099
| 107
| 0.673552
|
8b71b0fbe20166a8e0b0be028bffff240e40a3fa
| 14,371
|
py
|
Python
|
assignments/2020/assignment2_colab/assignment2/cs231n/classifiers/fc_net.py
|
benkmoore/cs231n.github.io
|
143e8864aed5ebe6b1ffaa407faf58d60de0adb9
|
[
"MIT"
] | 10
|
2020-09-14T09:54:05.000Z
|
2022-02-27T17:46:24.000Z
|
assignments/2020/assignment2_colab/assignment2/cs231n/classifiers/fc_net.py
|
benkmoore/cs231n.github.io
|
143e8864aed5ebe6b1ffaa407faf58d60de0adb9
|
[
"MIT"
] | 32
|
2020-09-17T19:43:53.000Z
|
2022-03-12T00:55:26.000Z
|
assignments/2020/assignment2_colab/assignment2/cs231n/classifiers/fc_net.py
|
benkmoore/cs231n.github.io
|
143e8864aed5ebe6b1ffaa407faf58d60de0adb9
|
[
"MIT"
] | 4
|
2020-11-26T14:59:58.000Z
|
2022-01-31T08:23:13.000Z
|
from builtins import range
from builtins import object
import numpy as np
from ..layers import *
from ..layer_utils import *
class TwoLayerNet(object):
"""
A two-layer fully-connected neural network with ReLU nonlinearity and
softmax loss that uses a modular layer design. We assume an input dimension
of D, a hidden dimension of H, and perform classification over C classes.
    The architecture should be affine - relu - affine - softmax.
Note that this class does not implement gradient descent; instead, it
will interact with a separate Solver object that is responsible for running
optimization.
The learnable parameters of the model are stored in the dictionary
self.params that maps parameter names to numpy arrays.
"""
def __init__(
self,
input_dim=3 * 32 * 32,
hidden_dim=100,
num_classes=10,
weight_scale=1e-3,
reg=0.0,
):
"""
Initialize a new network.
Inputs:
- input_dim: An integer giving the size of the input
- hidden_dim: An integer giving the size of the hidden layer
- num_classes: An integer giving the number of classes to classify
- weight_scale: Scalar giving the standard deviation for random
initialization of the weights.
- reg: Scalar giving L2 regularization strength.
"""
self.params = {}
self.reg = reg
############################################################################
# TODO: Initialize the weights and biases of the two-layer net. Weights #
# should be initialized from a Gaussian centered at 0.0 with #
# standard deviation equal to weight_scale, and biases should be #
# initialized to zero. All weights and biases should be stored in the #
# dictionary self.params, with first layer weights #
# and biases using the keys 'W1' and 'b1' and second layer #
# weights and biases using the keys 'W2' and 'b2'. #
############################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
pass
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
############################################################################
# END OF YOUR CODE #
############################################################################
def loss(self, X, y=None):
"""
Compute loss and gradient for a minibatch of data.
Inputs:
- X: Array of input data of shape (N, d_1, ..., d_k)
- y: Array of labels, of shape (N,). y[i] gives the label for X[i].
Returns:
If y is None, then run a test-time forward pass of the model and return:
- scores: Array of shape (N, C) giving classification scores, where
scores[i, c] is the classification score for X[i] and class c.
If y is not None, then run a training-time forward and backward pass and
return a tuple of:
- loss: Scalar value giving the loss
- grads: Dictionary with the same keys as self.params, mapping parameter
names to gradients of the loss with respect to those parameters.
"""
scores = None
############################################################################
# TODO: Implement the forward pass for the two-layer net, computing the #
# class scores for X and storing them in the scores variable. #
############################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
pass
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
############################################################################
# END OF YOUR CODE #
############################################################################
# If y is None then we are in test mode so just return scores
if y is None:
return scores
loss, grads = 0, {}
############################################################################
# TODO: Implement the backward pass for the two-layer net. Store the loss #
# in the loss variable and gradients in the grads dictionary. Compute data #
# loss using softmax, and make sure that grads[k] holds the gradients for #
# self.params[k]. Don't forget to add L2 regularization! #
# #
# NOTE: To ensure that your implementation matches ours and you pass the #
# automated tests, make sure that your L2 regularization includes a factor #
# of 0.5 to simplify the expression for the gradient. #
############################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
pass
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
############################################################################
# END OF YOUR CODE #
############################################################################
return loss, grads
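def _two_layer_forward_sketch(X, W1, b1, W2, b2):
    """Illustrative sketch only -- NOT the assignment solution.

    A self-contained NumPy walk-through of the affine - relu - affine - softmax
    pipeline described in the TwoLayerNet docstring, written with plain arrays
    instead of the course's layer helpers. All names are local to this sketch.
    """
    hidden = np.maximum(0, X.dot(W1) + b1)              # first affine + ReLU
    scores = hidden.dot(W2) + b2                        # second affine
    shifted = scores - scores.max(axis=1, keepdims=True)
    probs = np.exp(shifted) / np.exp(shifted).sum(axis=1, keepdims=True)
    return scores, probs                                # class scores and softmax probabilities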
class FullyConnectedNet(object):
"""
A fully-connected neural network with an arbitrary number of hidden layers,
ReLU nonlinearities, and a softmax loss function. This will also implement
dropout and batch/layer normalization as options. For a network with L layers,
the architecture will be
{affine - [batch/layer norm] - relu - [dropout]} x (L - 1) - affine - softmax
where batch/layer normalization and dropout are optional, and the {...} block is
repeated L - 1 times.
Similar to the TwoLayerNet above, learnable parameters are stored in the
self.params dictionary and will be learned using the Solver class.
"""
def __init__(
self,
hidden_dims,
input_dim=3 * 32 * 32,
num_classes=10,
dropout=1,
normalization=None,
reg=0.0,
weight_scale=1e-2,
dtype=np.float32,
seed=None,
):
"""
Initialize a new FullyConnectedNet.
Inputs:
- hidden_dims: A list of integers giving the size of each hidden layer.
- input_dim: An integer giving the size of the input.
- num_classes: An integer giving the number of classes to classify.
- dropout: Scalar between 0 and 1 giving dropout strength. If dropout=1 then
the network should not use dropout at all.
- normalization: What type of normalization the network should use. Valid values
are "batchnorm", "layernorm", or None for no normalization (the default).
- reg: Scalar giving L2 regularization strength.
- weight_scale: Scalar giving the standard deviation for random
initialization of the weights.
- dtype: A numpy datatype object; all computations will be performed using
this datatype. float32 is faster but less accurate, so you should use
float64 for numeric gradient checking.
- seed: If not None, then pass this random seed to the dropout layers. This
          will make the dropout layers deterministic so we can gradient check the
model.
"""
self.normalization = normalization
self.use_dropout = dropout != 1
self.reg = reg
self.num_layers = 1 + len(hidden_dims)
self.dtype = dtype
self.params = {}
############################################################################
# TODO: Initialize the parameters of the network, storing all values in #
# the self.params dictionary. Store weights and biases for the first layer #
# in W1 and b1; for the second layer use W2 and b2, etc. Weights should be #
# initialized from a normal distribution centered at 0 with standard #
# deviation equal to weight_scale. Biases should be initialized to zero. #
# #
# When using batch normalization, store scale and shift parameters for the #
# first layer in gamma1 and beta1; for the second layer use gamma2 and #
# beta2, etc. Scale parameters should be initialized to ones and shift #
# parameters should be initialized to zeros. #
############################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
pass
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
############################################################################
# END OF YOUR CODE #
############################################################################
# When using dropout we need to pass a dropout_param dictionary to each
# dropout layer so that the layer knows the dropout probability and the mode
# (train / test). You can pass the same dropout_param to each dropout layer.
self.dropout_param = {}
if self.use_dropout:
self.dropout_param = {"mode": "train", "p": dropout}
if seed is not None:
self.dropout_param["seed"] = seed
# With batch normalization we need to keep track of running means and
# variances, so we need to pass a special bn_param object to each batch
# normalization layer. You should pass self.bn_params[0] to the forward pass
# of the first batch normalization layer, self.bn_params[1] to the forward
# pass of the second batch normalization layer, etc.
self.bn_params = []
if self.normalization == "batchnorm":
self.bn_params = [{"mode": "train"} for i in range(self.num_layers - 1)]
if self.normalization == "layernorm":
self.bn_params = [{} for i in range(self.num_layers - 1)]
# Cast all parameters to the correct datatype
for k, v in self.params.items():
self.params[k] = v.astype(dtype)
def loss(self, X, y=None):
"""
Compute loss and gradient for the fully-connected net.
Input / output: Same as TwoLayerNet above.
"""
X = X.astype(self.dtype)
mode = "test" if y is None else "train"
# Set train/test mode for batchnorm params and dropout param since they
# behave differently during training and testing.
if self.use_dropout:
self.dropout_param["mode"] = mode
if self.normalization == "batchnorm":
for bn_param in self.bn_params:
bn_param["mode"] = mode
scores = None
############################################################################
# TODO: Implement the forward pass for the fully-connected net, computing #
# the class scores for X and storing them in the scores variable. #
# #
# When using dropout, you'll need to pass self.dropout_param to each #
# dropout forward pass. #
# #
# When using batch normalization, you'll need to pass self.bn_params[0] to #
# the forward pass for the first batch normalization layer, pass #
# self.bn_params[1] to the forward pass for the second batch normalization #
# layer, etc. #
############################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
pass
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
############################################################################
# END OF YOUR CODE #
############################################################################
# If test mode return early
if mode == "test":
return scores
loss, grads = 0.0, {}
############################################################################
# TODO: Implement the backward pass for the fully-connected net. Store the #
# loss in the loss variable and gradients in the grads dictionary. Compute #
# data loss using softmax, and make sure that grads[k] holds the gradients #
# for self.params[k]. Don't forget to add L2 regularization! #
# #
# When using batch/layer normalization, you don't need to regularize the scale #
# and shift parameters. #
# #
# NOTE: To ensure that your implementation matches ours and you pass the #
# automated tests, make sure that your L2 regularization includes a factor #
# of 0.5 to simplify the expression for the gradient. #
############################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
pass
# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
############################################################################
# END OF YOUR CODE #
############################################################################
return loss, grads
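def _layer_shapes_sketch(input_dim=3 * 32 * 32, hidden_dims=(100, 50), num_classes=10):
    """Illustrative sketch only -- NOT the assignment solution.

    Shows how hidden_dims chains the per-layer weight shapes described in the
    FullyConnectedNet docstring; the sizes here are hypothetical and the function
    is independent of self.params.
    """
    dims = [input_dim] + list(hidden_dims) + [num_classes]
    # W_i maps dims[i-1] -> dims[i]; the matching bias b_i has shape (dims[i],)
    return [("W%d" % i, (dims[i - 1], dims[i])) for i in range(1, len(dims))]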
| 49.215753
| 90
| 0.486953
|
5addefdd61797b0e9f2765e756ecdfdfee43590c
| 2,139
|
py
|
Python
|
examples/data_grid.py
|
edublancas/sklearn-model-evaluation
|
1f35d5bcc689a5f4d54c14fde60abf09af9fc374
|
[
"MIT"
] | 351
|
2016-01-27T19:15:27.000Z
|
2022-03-09T15:40:56.000Z
|
examples/data_grid.py
|
edublancas/sklearn-model-evaluation
|
1f35d5bcc689a5f4d54c14fde60abf09af9fc374
|
[
"MIT"
] | 37
|
2016-03-16T03:57:59.000Z
|
2021-06-26T14:02:33.000Z
|
examples/data_grid.py
|
edublancas/sklearn-model-evaluation
|
1f35d5bcc689a5f4d54c14fde60abf09af9fc374
|
[
"MIT"
] | 30
|
2016-01-27T19:27:08.000Z
|
2022-03-31T06:09:59.000Z
|
"""
%load_ext autoreload
%autoreload 2
"""
import matplotlib.pyplot as plt
from sklearn_evaluation.plot.matplotlib.bar import bar_groups
from sklearn_evaluation.plot.matplotlib.data_grid import DataGrid
records = [
{
'a': 1,
'b': 10,
'c': 1,
'data': 0.9
},
{
'a': 1,
'b': 10,
'c': 2,
'data': 0.91
},
{
'a': 2,
'b': 10,
'c': 1,
'data': 0.95
},
{
'a': 2,
'b': 10,
'c': 2,
'data': 0.96
},
{
'a': 5,
'b': 10,
'c': 1,
'data': 0.999999
},
{
'a': 5,
'b': 10,
'c': 2,
'data': 0.99
},
{
'a': 1,
'b': 100,
'c': 1,
'data': 0.7
},
{
'a': 1,
'b': 100,
'c': 2,
'data': 0.73
},
{
'a': 2,
'b': 100,
'c': 1,
'data': 0.77
},
{
'a': 2,
'b': 100,
'c': 2,
'data': 0.76
},
{
'a': 5,
'b': 100,
'c': 1,
'data': 0.69
},
{
'a': 5,
'b': 100,
'c': 2,
'data': 0.99
},
{
'a': 1,
'b': 1000,
'c': 1,
'data': 0.5
},
{
'a': 1,
'b': 1000,
'c': 2,
'data': 0.54
},
{
'a': 2,
'b': 1000,
'c': 1,
'data': 0.55
},
{
'a': 2,
'b': 1000,
'c': 2,
'data': 0.554
},
{
'a': 5,
'b': 1000,
'c': 1,
'data': 0.49
},
{
'a': 5,
'b': 1000,
'c': 2,
'data': 0.40
},
]
bar_groups(records,
group_by=['a', 'b'],
get_value=lambda data: data.mean(),
get_error=lambda data: data.std())
plt.show()
dg = DataGrid(records, group_by=['a', 'b'])
dg.df
list(dg.celliter())
list(dg.rowiter())
# heatmap.heatmap(records)
# import pandas as pd
# df = pd.DataFrame.from_dict(records)
# ax = sns.barplot(x="a", y="data", hue="b", data=df)
# plt.show()
| 15.278571
| 65
| 0.325386
|
a47c7a473e57a0037ae0bb3b3aae9e241f9d49e2
| 475
|
py
|
Python
|
sgce/accounts/models.py
|
patokrenzok/sgce-1
|
814b485970290727fee025798e508af107febffb
|
[
"MIT"
] | 1
|
2020-12-16T17:21:47.000Z
|
2020-12-16T17:21:47.000Z
|
sgce/accounts/models.py
|
FcoGabrielSL/sgce
|
aba719895c8516bf38342fb8bdeca5c3394ddffd
|
[
"MIT"
] | null | null | null |
sgce/accounts/models.py
|
FcoGabrielSL/sgce
|
aba719895c8516bf38342fb8bdeca5c3394ddffd
|
[
"MIT"
] | 1
|
2020-12-15T16:41:35.000Z
|
2020-12-15T16:41:35.000Z
|
from django.db import models
from django.conf import settings
class Profile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL,
verbose_name='usuário',
on_delete=models.CASCADE
)
phone = models.CharField('telefone', max_length=16, blank=True)
class Meta:
verbose_name = 'perfil'
verbose_name_plural = 'perfis'
def __str__(self):
return 'Perfil de {}'.format(self.user.get_full_name())
| 25
| 67
| 0.669474
|
565982178ecaad14519f3fa956f2d60c846ad876
| 186
|
py
|
Python
|
wargame/designpatterns/pythonic_dwarfmagiclocket.py
|
jeantardelli/wargameRepo
|
1e11ae40281f7eafa65ea6e40e045304b20e3824
|
[
"MIT"
] | 1
|
2020-12-01T20:30:27.000Z
|
2020-12-01T20:30:27.000Z
|
wargame/designpatterns/pythonic_dwarfmagiclocket.py
|
jeantardelli/wargameRepo
|
1e11ae40281f7eafa65ea6e40e045304b20e3824
|
[
"MIT"
] | null | null | null |
wargame/designpatterns/pythonic_dwarfmagiclocket.py
|
jeantardelli/wargameRepo
|
1e11ae40281f7eafa65ea6e40e045304b20e3824
|
[
"MIT"
] | null | null | null |
"""pythonic_dwarfmagiclocket
This module represents a dwarf magic locket object.
"""
class DwarfMagicLocket:
"""Represents an accessory for the attack of the orcs game"""
pass
| 20.666667
| 65
| 0.747312
|
c6f1adced9dddff776e216dae8c4363bd7d29d88
| 3,581
|
py
|
Python
|
robotframework-ls/src/robotframework_debug_adapter/prerun_modifiers.py
|
mrdimfox/robotframework-lsp
|
b864e25447d7035b9b8a5d72419e30c5c4510fc6
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
robotframework-ls/src/robotframework_debug_adapter/prerun_modifiers.py
|
mrdimfox/robotframework-lsp
|
b864e25447d7035b9b8a5d72419e30c5c4510fc6
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
robotframework-ls/src/robotframework_debug_adapter/prerun_modifiers.py
|
mrdimfox/robotframework-lsp
|
b864e25447d7035b9b8a5d72419e30c5c4510fc6
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
from robot.api import SuiteVisitor
import os
import json
from typing import Set, Dict, Optional
from robocorp_ls_core.robotframework_log import get_logger
log = get_logger(__name__)
class FilteringTestsSuiteVisitor(SuiteVisitor):
def __init__(
self, tests_filtering: Optional[Dict[str, Dict[str, Set[str]]]] = None
) -> None:
super().__init__()
# filename -> test names
self.include: Dict[str, Set[str]] = {}
self.exclude: Dict[str, Set[str]] = {}
self._include_contains_cache: dict = {}
self._exclude_contains_cache: dict = {}
if tests_filtering is None:
s = os.getenv("RFLS_PRERUN_FILTER_TESTS", "")
if s:
log.info("Found tests filtering: %s", s)
tests_filtering = json.loads(s)
def add(tup, container):
source, test_name = tup
source = self._normalize(source)
s = container.get(source)
if s is None:
s = container[source] = set()
s.add(test_name)
if tests_filtering:
for tup in tests_filtering.get("include", []):
add(tup, self.include)
for tup in tests_filtering.get("exclude", []):
add(tup, self.exclude)
def _normalize(self, source):
return os.path.normcase(os.path.normpath(os.path.abspath(source)))
def _contains(
self, container: dict, source: str, test_name: str, cache: dict
) -> bool:
# Note: we have a cache because _contains_uncached will always check
# the parent structure for hits and whenever we find a hit we
# can skip it.
key = (source, test_name)
ret = cache.get(key)
if ret is not None:
return ret
ret = self._contains_uncached(container, source, test_name, cache)
cache[key] = ret
return ret
def _contains_uncached(
self, container: dict, source: str, test_name: str, cache: dict
) -> bool:
# Try to check for the test directly
test_names = container.get(source)
if not test_names:
dirname = os.path.dirname(source)
if dirname == source or not dirname:
return False
return self._contains(
container,
dirname,
"*", # at a parent level the test name doesn't matter
cache,
)
if "*" in test_names:
return True
if test_name != "*":
return test_name in test_names
return False
def start_suite(self, suite) -> None:
new_tests = []
for t in suite.tests:
source = self._normalize(t.source)
if self.include:
if not self._contains(
self.include, source, t.name, self._include_contains_cache
):
log.debug("Test not in includes: %s - %s", t.source, t.name)
continue
# If we got here it's included, now, check excludes.
if self.exclude:
if self._contains(
self.exclude, source, t.name, self._exclude_contains_cache
):
log.debug("Test in excludes: %s - %s", t.source, t.name)
continue
new_tests.append(t)
suite.tests = new_tests
def end_suite(self, suite):
# We don't want to keep empty suites.
suite.suites = [s for s in suite.suites if s.test_count > 0]
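# Illustrative usage sketch (assumptions noted): judging from the add() helper
# above, the filtering payload is a dict of (source, test name) pairs; the paths
# and test names below are hypothetical, and "*" matches every test in a source.
_EXAMPLE_FILTERING = {
    "include": [["/suites/login.robot", "*"]],
    "exclude": [["/suites/login.robot", "Slow Login Test"]],
}
# visitor = FilteringTestsSuiteVisitor(tests_filtering=_EXAMPLE_FILTERING)
# suite.visit(visitor)  # `suite` being a robot.api TestSuite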
| 32.554545
| 80
| 0.553756
|
6b398c7b7bca5ef650b470a8a59fc036b4f4712a
| 261
|
py
|
Python
|
stubs/ev3_pybricks_v1_0_0/ssl.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/ev3_pybricks_v1_0_0/ssl.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/ev3_pybricks_v1_0_0/ssl.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
"""
Module: 'ssl' on LEGO EV3 v1.0.0
"""
# MCU: sysname=ev3, nodename=ev3, release=('v1.0.0',), version=('0.0.0',), machine=ev3
# Stubber: 1.3.2 - updated
from typing import Any
CERT_NONE = None
_ussl = None
sym = "CERT_REQUIRED"
def wrap_socket():
pass
| 17.4
| 86
| 0.64751
|
b248f394c23e130b3dfe5441e8a53e820b64efbc
| 1,649
|
py
|
Python
|
utils/constants.py
|
paulaWesselmann/testing_pydial
|
bf4fd0c99242e49d67895d92c6cfc3dc31084182
|
[
"Apache-2.0"
] | 3
|
2019-09-27T06:07:12.000Z
|
2020-01-06T19:00:34.000Z
|
utils/constants.py
|
paulaWesselmann/testing_pydial
|
bf4fd0c99242e49d67895d92c6cfc3dc31084182
|
[
"Apache-2.0"
] | null | null | null |
utils/constants.py
|
paulaWesselmann/testing_pydial
|
bf4fd0c99242e49d67895d92c6cfc3dc31084182
|
[
"Apache-2.0"
] | 1
|
2019-11-27T09:28:10.000Z
|
2019-11-27T09:28:10.000Z
|
# copied from Pathak 2017
constants = {
'GAMMA': 0.99, # discount factor for rewards
'LAMBDA': 1.0, # lambda of Generalized Advantage Estimation: https://arxiv.org/abs/1506.02438
    'ENTROPY_BETA': 0.0005, # entropy regularization constant. ... 0.01 for doom/0.0005 for mario
'ROLLOUT_MAXLEN': 20, # 20 represents the number of 'local steps': the number of timesteps
# we run the policy before we update the parameters.
# The larger local steps is, the lower is the variance in our policy gradients estimate
# on the one hand; but on the other hand, we get less frequent parameter updates, which
# slows down learning. In this code, we found that making local steps be much
# smaller than 20 makes the algorithm more difficult to tune and to get to work.
'GRAD_NORM_CLIP': 40.0, # gradient norm clipping
'REWARD_CLIP': 1.0, # reward value clipping in [-x,x]
'MAX_GLOBAL_STEPS': 100000000, # total steps taken across all workers
'LEARNING_RATE': 1e-4, # learning rate for adam
'PREDICTION_BETA': 0.2, # weight of prediction bonus ... 0.01 for doom/0.2 for mario
# set 0.5 for unsup=state
'PREDICTION_LR_SCALE': 10.0, # scale lr of predictor wrt to policy network
# set 30-50 for unsup=state
'FORWARD_LOSS_WT': 0.2, # should be between [0,1]
# predloss = ( (1-FORWARD_LOSS_WT) * inv_loss + FORWARD_LOSS_WT * forward_loss) * PREDICTION_LR_SCALE
'POLICY_NO_BACKPROP_STEPS': 0, # number of global steps after which we start backpropagating to policy
}
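# Illustrative check of the prediction-loss weighting described in the comment
# above; the inverse/forward loss values are hypothetical placeholders.
def _pred_loss_sketch(inv_loss=0.8, forward_loss=0.3):
    w = constants['FORWARD_LOSS_WT']
    return ((1 - w) * inv_loss + w * forward_loss) * constants['PREDICTION_LR_SCALE']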
| 65.96
| 127
| 0.659794
|
17809252025934281de61c90488d95525f0f632a
| 9,308
|
py
|
Python
|
pandas/tests/window/conftest.py
|
Japanuspus/pandas
|
e38e987160c792f315685dc74fc1fc33d9389a71
|
[
"BSD-3-Clause"
] | 1
|
2021-01-06T00:28:03.000Z
|
2021-01-06T00:28:03.000Z
|
pandas/tests/window/conftest.py
|
Japanuspus/pandas
|
e38e987160c792f315685dc74fc1fc33d9389a71
|
[
"BSD-3-Clause"
] | null | null | null |
pandas/tests/window/conftest.py
|
Japanuspus/pandas
|
e38e987160c792f315685dc74fc1fc33d9389a71
|
[
"BSD-3-Clause"
] | null | null | null |
from datetime import datetime, timedelta
import numpy as np
import pytest
import pandas.util._test_decorators as td
from pandas import DataFrame, Series, bdate_range, notna
@pytest.fixture(params=[True, False])
def raw(request):
return request.param
@pytest.fixture(
params=[
"triang",
"blackman",
"hamming",
"bartlett",
"bohman",
"blackmanharris",
"nuttall",
"barthann",
]
)
def win_types(request):
return request.param
@pytest.fixture(params=["kaiser", "gaussian", "general_gaussian", "exponential"])
def win_types_special(request):
return request.param
@pytest.fixture(
params=["sum", "mean", "median", "max", "min", "var", "std", "kurt", "skew"]
)
def arithmetic_win_operators(request):
return request.param
@pytest.fixture(params=["right", "left", "both", "neither"])
def closed(request):
return request.param
@pytest.fixture(params=[True, False])
def center(request):
return request.param
@pytest.fixture(params=[None, 1])
def min_periods(request):
return request.param
@pytest.fixture(params=[True, False])
def parallel(request):
"""parallel keyword argument for numba.jit"""
return request.param
@pytest.fixture(params=[True, False])
def nogil(request):
"""nogil keyword argument for numba.jit"""
return request.param
@pytest.fixture(params=[True, False])
def nopython(request):
"""nopython keyword argument for numba.jit"""
return request.param
@pytest.fixture(
params=[
pytest.param(
"numba", marks=td.skip_if_no("numba", "0.46.0")
), # type: ignore[list-item]
"cython",
]
)
def engine(request):
"""engine keyword argument for rolling.apply"""
return request.param
@pytest.fixture(
params=[
pytest.param(("numba", True), marks=td.skip_if_no("numba", "0.46.0")),
("cython", True),
("cython", False),
]
)
def engine_and_raw(request):
"""engine and raw keyword arguments for rolling.apply"""
return request.param
# create the data only once as we are not setting it
def _create_consistency_data():
def create_series():
return [
Series(dtype=object),
Series([np.nan]),
Series([np.nan, np.nan]),
Series([3.0]),
Series([np.nan, 3.0]),
Series([3.0, np.nan]),
Series([1.0, 3.0]),
Series([2.0, 2.0]),
Series([3.0, 1.0]),
Series(
[5.0, 5.0, 5.0, 5.0, np.nan, np.nan, np.nan, 5.0, 5.0, np.nan, np.nan]
),
Series(
[
np.nan,
5.0,
5.0,
5.0,
np.nan,
np.nan,
np.nan,
5.0,
5.0,
np.nan,
np.nan,
]
),
Series(
[
np.nan,
np.nan,
5.0,
5.0,
np.nan,
np.nan,
np.nan,
5.0,
5.0,
np.nan,
np.nan,
]
),
Series(
[
np.nan,
3.0,
np.nan,
3.0,
4.0,
5.0,
6.0,
np.nan,
np.nan,
7.0,
12.0,
13.0,
14.0,
15.0,
]
),
Series(
[
np.nan,
5.0,
np.nan,
2.0,
4.0,
0.0,
9.0,
np.nan,
np.nan,
3.0,
12.0,
13.0,
14.0,
15.0,
]
),
Series(
[
2.0,
3.0,
np.nan,
3.0,
4.0,
5.0,
6.0,
np.nan,
np.nan,
7.0,
12.0,
13.0,
14.0,
15.0,
]
),
Series(
[
2.0,
5.0,
np.nan,
2.0,
4.0,
0.0,
9.0,
np.nan,
np.nan,
3.0,
12.0,
13.0,
14.0,
15.0,
]
),
Series(range(10)),
Series(range(20, 0, -2)),
]
def create_dataframes():
return [
DataFrame(),
DataFrame(columns=["a"]),
DataFrame(columns=["a", "a"]),
DataFrame(columns=["a", "b"]),
DataFrame(np.arange(10).reshape((5, 2))),
DataFrame(np.arange(25).reshape((5, 5))),
DataFrame(np.arange(25).reshape((5, 5)), columns=["a", "b", 99, "d", "d"]),
] + [DataFrame(s) for s in create_series()]
def is_constant(x):
values = x.values.ravel("K")
return len(set(values[notna(values)])) == 1
def no_nans(x):
return x.notna().all().all()
# data is a tuple(object, is_constant, no_nans)
data = create_series() + create_dataframes()
return [(x, is_constant(x), no_nans(x)) for x in data]
@pytest.fixture(params=_create_consistency_data())
def consistency_data(request):
"""Create consistency data"""
return request.param
def _create_arr():
"""Internal function to mock an array."""
arr = np.random.randn(100)
locs = np.arange(20, 40)
arr[locs] = np.NaN
return arr
def _create_rng():
"""Internal function to mock date range."""
rng = bdate_range(datetime(2009, 1, 1), periods=100)
return rng
def _create_series():
"""Internal function to mock Series."""
arr = _create_arr()
series = Series(arr.copy(), index=_create_rng())
return series
def _create_frame():
"""Internal function to mock DataFrame."""
rng = _create_rng()
return DataFrame(np.random.randn(100, 10), index=rng, columns=np.arange(10))
@pytest.fixture
def nan_locs():
"""Make a range as loc fixture."""
return np.arange(20, 40)
@pytest.fixture
def arr():
"""Make an array as fixture."""
return _create_arr()
@pytest.fixture
def frame():
"""Make mocked frame as fixture."""
return _create_frame()
@pytest.fixture
def series():
"""Make mocked series as fixture."""
return _create_series()
@pytest.fixture(params=[_create_series(), _create_frame()])
def which(request):
"""Turn parametrized which as fixture for series and frame"""
return request.param
@pytest.fixture(params=["1 day", timedelta(days=1)])
def halflife_with_times(request):
"""Halflife argument for EWM when times is specified."""
return request.param
@pytest.fixture(
params=[
"object",
"category",
"int8",
"int16",
"int32",
"int64",
"uint8",
"uint16",
"uint32",
"uint64",
"float16",
"float32",
"float64",
"m8[ns]",
"M8[ns]",
pytest.param( # type: ignore[list-item]
"datetime64[ns, UTC]",
marks=pytest.mark.skip(
"direct creation of extension dtype datetime64[ns, UTC] "
"is not supported ATM"
),
),
]
)
def dtypes(request):
"""Dtypes for window tests"""
return request.param
@pytest.fixture(
params=[
DataFrame([[2, 4], [1, 2], [5, 2], [8, 1]], columns=[1, 0]),
DataFrame([[2, 4], [1, 2], [5, 2], [8, 1]], columns=[1, 1]),
DataFrame([[2, 4], [1, 2], [5, 2], [8, 1]], columns=["C", "C"]),
DataFrame([[2, 4], [1, 2], [5, 2], [8, 1]], columns=[1.0, 0]),
DataFrame([[2, 4], [1, 2], [5, 2], [8, 1]], columns=[0.0, 1]),
DataFrame([[2, 4], [1, 2], [5, 2], [8, 1]], columns=["C", 1]),
DataFrame([[2.0, 4.0], [1.0, 2.0], [5.0, 2.0], [8.0, 1.0]], columns=[1, 0.0]),
DataFrame([[2, 4.0], [1, 2.0], [5, 2.0], [8, 1.0]], columns=[0, 1.0]),
DataFrame([[2, 4], [1, 2], [5, 2], [8, 1.0]], columns=[1.0, "X"]),
]
)
def pairwise_frames(request):
"""Pairwise frames test_pairwise"""
return request.param
@pytest.fixture
def pairwise_target_frame():
"""Pairwise target frame for test_pairwise"""
return DataFrame([[2, 4], [1, 2], [5, 2], [8, 1]], columns=[0, 1])
@pytest.fixture
def pairwise_other_frame():
"""Pairwise other frame for test_pairwise"""
return DataFrame(
[[None, 1, 1], [None, 1, 2], [None, 3, 2], [None, 8, 1]],
columns=["Y", "Z", "X"],
)
| 24.559367
| 87
| 0.45015
|
9b3cc1f6523e1a2ab8af6151f8d387ed20705954
| 11,017
|
py
|
Python
|
services/ingest-file/ingestors/support/cellebrite.py
|
fastbone/aleph
|
82fea8b1eba21462b74bdf4dd9eabe0574b234b9
|
[
"MIT"
] | null | null | null |
services/ingest-file/ingestors/support/cellebrite.py
|
fastbone/aleph
|
82fea8b1eba21462b74bdf4dd9eabe0574b234b9
|
[
"MIT"
] | null | null | null |
services/ingest-file/ingestors/support/cellebrite.py
|
fastbone/aleph
|
82fea8b1eba21462b74bdf4dd9eabe0574b234b9
|
[
"MIT"
] | null | null | null |
import logging
import lxml.etree as ET
from banal import ensure_list
from normality import stringify
from ingestors.support.timestamp import TimestampSupport
log = logging.getLogger(__name__)
OUTGOING = 'Outgoing'
class CellebriteSupport(TimestampSupport):
NS = "http://pa.cellebrite.com/report/2.0"
NSMAP = {"ns": NS}
def _ns_tag(self, tag):
return '{{{0}}}{1}'.format(self.NS, tag)
def get_seconds(self, time_str):
"""Get Seconds from time"""
h, m, s = time_str.split(':')
return float(h) * 3600 + float(m) * 60 + float(s)
def _field_values(self, el, name):
query = './ns:field[@name="%s"]/ns:value/text()' % name
values = []
for value in el.xpath(query, namespaces=self.NSMAP):
value = stringify(value)
if value is not None:
values.append(value)
return list(sorted(values))
def _models(self, el, name):
query = ".//ns:model[@type='%s']" % name
yield from el.xpath(query, namespaces=self.NSMAP)
def _get_party(self, names, identifiers, proof=None):
party = self.manager.make_entity('LegalEntity')
party.add('name', names)
party.add('proof', proof)
for identifier in sorted(identifiers, key=len, reverse=True):
prop = 'email' if '@' in identifier else 'phone'
party.add(prop, identifier)
if not party.id:
party.make_id(identifier)
if not party.id:
party.make_id(*ensure_list(names))
if party.id:
self.manager.emit_entity(party)
return party
def parse_metadata(self, doc, file_path):
context = ET.iterparse(str(file_path), events=('end', ),
recover=True, tag=self._ns_tag('metadata'))
project_id = None
for event, meta in context:
project = meta.getparent()
project_id = project_id or project.get('id')
if project is not None and project.tag != self._ns_tag('project'):
meta.clear()
break
owner = self.manager.make_entity('LegalEntity')
owner.add('proof', doc)
identities = set()
identities.update(self._item(meta, 'DeviceInfoUniqueID'))
identities.update(self._item(meta, 'IMEI'))
identities.update(self._item(meta, 'DeviceInfoUnitIdentifier'))
if len(identities) and not owner.id:
owner.make_id(project_id, *sorted(identities))
owner.add('name', self._item(meta, 'DeviceInfoOwnerName'))
owner.add('email', self._item(meta, 'DeviceInfoAppleID'))
owner.add('phone', self._item(meta, 'MSISDN'))
if not owner.has('name'):
owner.add('name', self._item(meta, 'DeviceInfoDetectedModel'))
if not owner.has('name'):
man = self._item(meta, 'DeviceInfoSelectedManufacturer')
name = self._item(meta, 'DeviceInfoSelectedDeviceName')
if name is not None and man is not None:
owner.add('name', '%s (%s)' % (name, man))
meta.clear()
del context
if owner.id is not None:
self.manager.emit_entity(owner)
return project_id, owner
def parse_content(self, entity, file_path, owner, project_id):
# We're using iterparse instead of xpaths to reduce memory usage.
# iterparse parses the file top to bottom and emits `start` and `end`
# events when it encounters the start or end of a tag. We want to clear
# a tag and its children once we are done processing the tag but not
# before that.
context = ET.iterparse(str(file_path), events=('start', 'end'),
recover=True)
# stores children tags to be cleared after the parent we are interested
# in is processed
elements_to_clear = []
# id of the element being processed currently
element_being_processed = None
for event, el in context:
parent = el.getparent()
if parent is not None and parent.tag == self._ns_tag('modelType'):
type_ = el.get('type')
if type_ in ('Call', 'Chat', 'Note', 'SMS', 'Contact'):
if event == 'start':
# Set the element being processed
element_being_processed = el.get('id')
continue
else:
if type_ == 'Call':
self.parse_calls(el, entity, project_id, owner)
elif type_ == 'Chat':
self.parse_messages(el, entity, project_id, owner)
elif type_ == 'Note':
self.parse_notes(el, entity, project_id)
elif type_ == 'SMS':
self.parse_sms(el, entity, project_id)
elif type_ == 'Contact':
self.parse_contacts(el, entity, project_id)
# We're done with processing an element. Clear it and
# its children elements
while elements_to_clear:
el = elements_to_clear.pop(0)
el.clear()
if event == 'end':
if element_being_processed is not None:
# we are yet to process the parent element; don't clear
# the child element yet.
elements_to_clear.append(el)
else:
# No element is being processed right now; it's safe to
# clear the element
el.clear()
del context
def parse_parties(self, parties):
for party in parties:
names = self._field_values(party, 'Name')
identifiers = self._field_values(party, 'Identifier')
yield self._get_party(names, identifiers)
def parse_calls(self, call, doc, project_id, owner):
entity = self.manager.make_entity('Call')
entity.make_id(project_id, call.get('id'))
# entity.add('proof', doc)
for timestamp in self._field_values(call, 'TimeStamp'):
entity.add('date', self.parse_timestamp(timestamp))
for duration in self._field_values(call, 'Duration'):
entity.add('duration', self.get_seconds(duration))
call_types = self._field_values(call, 'Type')
if OUTGOING in call_types:
entity.add('caller', owner)
entity.add('callerNumber', owner.get('phone'))
else:
entity.add('receiver', owner)
entity.add('receiverNumber', owner.get('phone'))
for party in self.parse_parties(self._models(call, 'Party')):
if OUTGOING in call_types:
entity.add('receiver', party)
entity.add('receiverNumber', party.get('phone'))
else:
entity.add('caller', party)
entity.add('callerNumber', party.get('phone'))
self.manager.emit_entity(entity)
def parse_messages(self, thread, doc, project_id, owner):
"""Message Parsing"""
ns = self.NSMAP
thread_id = thread.get('id')
thread_name = self._field_values(thread, 'Name')
thread_description = self._field_values(thread, 'Description')
last_message = None
for message in self._models(thread, 'InstantMessage'):
message_id = message.get('id')
entity = self.manager.make_entity('Message')
entity.make_id(project_id, thread_id, message_id)
entity.add('proof', doc)
for timestamp in self._field_values(message, 'TimeStamp'):
entity.add('date', self.parse_timestamp(timestamp))
entity.add('subject', self._field_values(message, 'Subject'))
entity.add('threadTopic', thread_name)
entity.add('threadTopic', thread_description)
senders = message.xpath('./ns:modelField[@name="From"]/ns:model[@type="Party"]', namespaces=ns) # noqa
for sender in self.parse_parties(senders):
entity.add('sender', sender)
receivers = message.xpath('./ns:modelField[@name="To"]/ns:model[@type="Party"]', namespaces=ns) # noqa
for receiver in self.parse_parties(receivers):
entity.add('recipients', receiver)
status = self._field_values(message, 'Status')
if 'Read' in status:
entity.add('recipients', owner)
elif 'Sent' in status:
entity.add('sender', owner)
entity.add('bodyText', self._field_values(message, 'Body'))
# attachments = message.xpath(
# './ns:multiModelField[@name="Attachments"]/'
# 'ns:model[@type="Attachment"]/ns:field[@name="Filename"]'
# '/ns:value/text()', namespaces=ns
# )
# entity.add('metadata', {'attachments': attachments})
entity.add('inReplyToMessage', last_message)
last_message = entity
self.manager.emit_entity(entity)
def parse_contacts(self, contact, doc, project_id):
name = self._field_values(contact, 'Name')
numbers = []
for el in self._models(contact, 'PhoneNumber'):
numbers.extend(self._field_values(el, 'Value'))
self._get_party(name, numbers, proof=doc)
def parse_notes(self, note, doc, project_id):
entity = self.manager.make_entity('PlainText')
entity.make_id(project_id, note.get('id'))
entity.add('proof', doc)
entity.add('title', self._field_values(note, 'Title'))
entity.add('summary', self._field_values(note, 'Summary'))
entity.add('bodyText', self._field_values(note, 'Body'))
for timestamp in self._field_values(note, 'Creation'):
entity.add('date', self.parse_timestamp(timestamp))
self.manager.emit_entity(entity)
def parse_sms(self, sms, doc, project_id):
entity = self.manager.make_entity('Message')
entity.make_id(project_id, sms.get('id'))
entity.add('proof', doc)
entity.add('bodyText', self._field_values(sms, 'Body'))
for timestamp in self._field_values(sms, 'TimeStamp'):
entity.add('date', self.parse_timestamp(timestamp))
for party in self._models(sms, 'Party'):
name = self._field_values(party, 'Name')
number = self._field_values(party, 'Identifier')
party_entity = self._get_party(name, number, proof=doc)
if 'From' in self._field_values(party, 'Role'):
entity.add('sender', party_entity)
else:
entity.add('recipients', party_entity)
self.manager.emit_entity(entity)
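def _iterparse_sketch(file_path, wanted_tag):
    # Illustrative sketch only (never called): a stripped-down version of the
    # start/end clearing strategy used in CellebriteSupport.parse_content above,
    # with a hypothetical tag; it assumes wanted elements are not nested inside
    # each other. Children are only buffered while a wanted element is open, and
    # everything is released once that element has been processed.
    inside_wanted = False
    pending = []
    for event, el in ET.iterparse(str(file_path), events=('start', 'end'),
                                  recover=True):
        if event == 'start':
            if el.tag == wanted_tag:
                inside_wanted = True
            continue
        # event == 'end'
        if el.tag == wanted_tag:
            # ... process `el` here, then release it and its children ...
            while pending:
                pending.pop(0).clear()
            el.clear()
            inside_wanted = False
        elif inside_wanted:
            pending.append(el)
        else:
            el.clear()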
| 43.374016
| 115
| 0.5703
|
1ca4f0d92fe7d5c6c22af3c59043613851165148
| 351
|
py
|
Python
|
Python/overload-example.py
|
saneravi/ML_Stuff
|
74e1ed7ba9f4dccb555792315a14ba6071150304
|
[
"MIT"
] | 209
|
2015-01-02T03:47:12.000Z
|
2022-03-06T16:54:47.000Z
|
Python/overload-example.py
|
Kerwin-Xie/algorithms
|
4347a9b7bf54ef378d16d26ef9e357ddc710664b
|
[
"MIT"
] | 3
|
2015-12-06T14:40:34.000Z
|
2021-03-22T17:40:24.000Z
|
Python/overload-example.py
|
Kerwin-Xie/algorithms
|
4347a9b7bf54ef378d16d26ef9e357ddc710664b
|
[
"MIT"
] | 114
|
2015-01-31T08:37:10.000Z
|
2022-02-23T04:42:28.000Z
|
from typing import overload
@overload
def upcase(s: str) -> str:
...
@overload
def upcase(s: bytes) -> bytes:
...
def upcase(s):
if isinstance(s, str):
return s.upper()
elif isinstance(s, bytes):
return bytes(x - 0x20 if 0x61 <= x <= 0x7A else x for x in s)
else:
raise TypeError("need str or bytes")
| 16.714286
| 69
| 0.584046
|
b674bd6528d39fc2f26844cae88de7b91e1d3ef0
| 103
|
py
|
Python
|
looking_for_group/world/apps.py
|
andrlik/looking-for-group
|
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
|
[
"BSD-3-Clause"
] | null | null | null |
looking_for_group/world/apps.py
|
andrlik/looking-for-group
|
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
|
[
"BSD-3-Clause"
] | null | null | null |
looking_for_group/world/apps.py
|
andrlik/looking-for-group
|
0b1cecb37ef0f6d75692fd188130e2c60d09b7d2
|
[
"BSD-3-Clause"
] | null | null | null |
from django.apps import AppConfig
class WorldConfig(AppConfig):
name = "looking_for_group.world"
| 17.166667
| 36
| 0.776699
|
3003f8f00b5829d758ecff0359035a285c42f6a5
| 11,985
|
py
|
Python
|
components/esptool_py/esptool/ecdsa/six.py
|
dereklitao/ESP8266_RTOS_SDK
|
8680849684f17ebc1a71df5be7f9547d9ecbecc5
|
[
"Apache-2.0"
] | 2,701
|
2015-10-28T08:28:16.000Z
|
2022-03-31T16:51:13.000Z
|
components/esptool_py/esptool/ecdsa/six.py
|
dereklitao/ESP8266_RTOS_SDK
|
8680849684f17ebc1a71df5be7f9547d9ecbecc5
|
[
"Apache-2.0"
] | 1,136
|
2015-11-09T02:40:55.000Z
|
2022-03-30T21:28:00.000Z
|
components/esptool_py/esptool/ecdsa/six.py
|
pheki/ESP8266_RTOS_SDK
|
89a3f254b63819035f65d9c5dcdae8864f1a6a8a
|
[
"Apache-2.0"
] | 1,577
|
2015-10-27T15:29:27.000Z
|
2022-03-31T07:11:49.000Z
|
"""Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2012 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.2.0"
# True if we are running on Python 3.
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
def __init__(self, name):
self.name = name
def __get__(self, obj, tp):
result = self._resolve()
setattr(obj, self.name, result)
# This is a bit ugly, but it avoids running this again.
delattr(tp, self.name)
return result
class MovedModule(_LazyDescr):
def __init__(self, name, old, new=None):
super(MovedModule, self).__init__(name)
if PY3:
if new is None:
new = name
self.mod = new
else:
self.mod = old
def _resolve(self):
return _import_module(self.mod)
class MovedAttribute(_LazyDescr):
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
super(MovedAttribute, self).__init__(name)
if PY3:
if new_mod is None:
new_mod = name
self.mod = new_mod
if new_attr is None:
if old_attr is None:
new_attr = name
else:
new_attr = old_attr
self.attr = new_attr
else:
self.mod = old_mod
if old_attr is None:
old_attr = name
self.attr = old_attr
def _resolve(self):
module = _import_module(self.mod)
return getattr(module, self.attr)
class _MovedItems(types.ModuleType):
"""Lazy loading of moved objects"""
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
del attr
moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_iterkeys = "keys"
_itervalues = "values"
_iteritems = "items"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_code = "func_code"
_func_defaults = "func_defaults"
_iterkeys = "iterkeys"
_itervalues = "itervalues"
_iteritems = "iteritems"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
def iterkeys(d):
"""Return an iterator over the keys of a dictionary."""
return iter(getattr(d, _iterkeys)())
def itervalues(d):
"""Return an iterator over the values of a dictionary."""
return iter(getattr(d, _itervalues)())
def iteritems(d):
"""Return an iterator over the (key, value) pairs of a dictionary."""
return iter(getattr(d, _iteritems)())
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
if sys.version_info[1] <= 1:
def int2byte(i):
return bytes((i,))
else:
# This is about 2x faster than the implementation above on 3.2+
int2byte = operator.methodcaller("to_bytes", 1, "big")
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
else:
def b(s):
return s
def u(s):
if isinstance(s, unicode):
return s
return unicode(s, "unicode_escape")
int2byte = chr
import StringIO
StringIO = BytesIO = StringIO.StringIO
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
if PY3:
import builtins
exec_ = getattr(builtins, "exec")
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
print_ = getattr(builtins, "print")
del builtins
else:
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
frame = sys._getframe(1)
_globs_ = frame.f_globals
if _locs_ is None:
_locs_ = frame.f_locals
del frame
elif _locs_ is None:
_locs_ = _globs_
exec("""exec _code_ in _globs_, _locs_""")
exec_("""def reraise(tp, value, tb=None):
raise tp, value, tb
""")
def print_(*args, **kwargs):
"""The new-style print function."""
fp = kwargs.pop("file", sys.stdout)
if fp is None:
return
def write(data):
if not isinstance(data, basestring):
data = str(data)
fp.write(data)
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
if isinstance(sep, unicode):
want_unicode = True
elif not isinstance(sep, str):
raise TypeError("sep must be None or a string")
end = kwargs.pop("end", None)
if end is not None:
if isinstance(end, unicode):
want_unicode = True
elif not isinstance(end, str):
raise TypeError("end must be None or a string")
if kwargs:
raise TypeError("invalid keyword arguments to print()")
if not want_unicode:
for arg in args:
if isinstance(arg, unicode):
want_unicode = True
break
if want_unicode:
newline = unicode("\n")
space = unicode(" ")
else:
newline = "\n"
space = " "
if sep is None:
sep = space
if end is None:
end = newline
for i, arg in enumerate(args):
if i:
write(sep)
write(arg)
write(end)
_add_doc(reraise, """Reraise an exception.""")
def with_metaclass(meta, base=object):
"""Create a base class with a metaclass."""
return meta("NewBase", (base,), {})
| 30.341772
| 87
| 0.624698
|
99aaa0b0fea1e7e1c4d6765a6e6beef6c2929775
| 9,416
|
py
|
Python
|
tensorflow_tts/utils/korean.py
|
Joovvhan/TensorFlowTTS
|
1303ab8db8830afa64af7e9f2e4a038fd52f7355
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_tts/utils/korean.py
|
Joovvhan/TensorFlowTTS
|
1303ab8db8830afa64af7e9f2e4a038fd52f7355
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_tts/utils/korean.py
|
Joovvhan/TensorFlowTTS
|
1303ab8db8830afa64af7e9f2e4a038fd52f7355
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
# Code based on https://github.com/carpedm20/multi-speaker-tacotron-tensorflow
import re
import os
import ast
import json
from jamo import hangul_to_jamo, h2j, j2h, jamo_to_hcj
from .ko_dictionary import english_dictionary, etc_dictionary
"""
Leading (choseong) and trailing (jongseong) consonant jamo look identical, but they are distinct characters.
'_-!'(),-.:;? ᄀᄁᄂᄃᄄᄅᄆᄇᄈᄉᄊᄋᄌᄍᄎᄏᄐᄑ하ᅢᅣᅤᅥᅦᅧᅨᅩᅪᅫᅬᅭᅮᅯᅰᅱᅲᅳᅴᅵᆨᆩᆪᆫᆬᆭᆮᆯᆰᆱᆲᆳᆴᆵᆶᆷᆸᆹᆺᆻᆼᆽᆾᆿᇀᇁᇂ~'
'_': 0, '-': 7, '!': 2, "'": 3, '(': 4, ')': 5, ',': 6, '.': 8, ':': 9, ';': 10,
'?': 11, ' ': 12, 'ᄀ': 13, 'ᄁ': 14, 'ᄂ': 15, 'ᄃ': 16, 'ᄄ': 17, 'ᄅ': 18, 'ᄆ': 19, 'ᄇ': 20,
'ᄈ': 21, 'ᄉ': 22, 'ᄊ': 23, 'ᄋ': 24, 'ᄌ': 25, 'ᄍ': 26, 'ᄎ': 27, 'ᄏ': 28, 'ᄐ': 29, 'ᄑ': 30,
'ᄒ': 31, 'ᅡ': 32, 'ᅢ': 33, 'ᅣ': 34, 'ᅤ': 35, 'ᅥ': 36, 'ᅦ': 37, 'ᅧ': 38, 'ᅨ': 39, 'ᅩ': 40,
'ᅪ': 41, 'ᅫ': 42, 'ᅬ': 43, 'ᅭ': 44, 'ᅮ': 45, 'ᅯ': 46, 'ᅰ': 47, 'ᅱ': 48, 'ᅲ': 49, 'ᅳ': 50,
'ᅴ': 51, 'ᅵ': 52, 'ᆨ': 53, 'ᆩ': 54, 'ᆪ': 55, 'ᆫ': 56, 'ᆬ': 57, 'ᆭ': 58, 'ᆮ': 59, 'ᆯ': 60,
'ᆰ': 61, 'ᆱ': 62, 'ᆲ': 63, 'ᆳ': 64, 'ᆴ': 65, 'ᆵ': 66, 'ᆶ': 67, 'ᆷ': 68, 'ᆸ': 69, 'ᆹ': 70,
'ᆺ': 71, 'ᆻ': 72, 'ᆼ': 73, 'ᆽ': 74, 'ᆾ': 75, 'ᆿ': 76, 'ᇀ': 77, 'ᇁ': 78, 'ᇂ': 79, '~': 80
"""
_pad = "_"
_eos = "~"
_punctuation = "!'(),-.:;? "
_special = "-"
_jamo_leads = [chr(_) for _ in range(0x1100, 0x1113)]
_jamo_vowels = [chr(_) for _ in range(0x1161, 0x1176)]
_jamo_tails = [chr(_) for _ in range(0x11A8, 0x11C3)]
_letters = _jamo_leads + _jamo_vowels + _jamo_tails
symbols = [_pad] + list(_special) + list(_punctuation) + _letters + [_eos]
_symbol_to_id = {c: i for i, c in enumerate(symbols)}
_id_to_symbol = {i: c for i, c in enumerate(symbols)}
quote_checker = """([`"'"“‘])(.+?)([`"'"”’])"""
def is_lead(char):
return char in _jamo_leads
def is_vowel(char):
return char in _jamo_vowels
def is_tail(char):
return char in _jamo_tails
def get_mode(char):
if is_lead(char):
return 0
elif is_vowel(char):
return 1
elif is_tail(char):
return 2
else:
return -1
def _get_text_from_candidates(candidates):
if len(candidates) == 0:
return ""
elif len(candidates) == 1:
return jamo_to_hcj(candidates[0])
else:
return j2h(**dict(zip(["lead", "vowel", "tail"], candidates)))
def jamo_to_korean(text):
text = h2j(text)
idx = 0
new_text = ""
candidates = []
while True:
if idx >= len(text):
new_text += _get_text_from_candidates(candidates)
break
char = text[idx]
mode = get_mode(char)
if mode == 0:
new_text += _get_text_from_candidates(candidates)
candidates = [char]
elif mode == -1:
new_text += _get_text_from_candidates(candidates)
new_text += char
candidates = []
else:
candidates.append(char)
idx += 1
return new_text
num_to_kor = {
"0": "영",
"1": "일",
"2": "이",
"3": "삼",
"4": "사",
"5": "오",
"6": "육",
"7": "칠",
"8": "팔",
"9": "구",
}
unit_to_kor1 = {"%": "퍼센트", "cm": "센치미터", "mm": "밀리미터", "km": "킬로미터", "kg": "킬로그람"}
unit_to_kor2 = {"m": "미터"}
upper_to_kor = {
"A": "에이",
"B": "비",
"C": "씨",
"D": "디",
"E": "이",
"F": "에프",
"G": "지",
"H": "에이치",
"I": "아이",
"J": "제이",
"K": "케이",
"L": "엘",
"M": "엠",
"N": "엔",
"O": "오",
"P": "피",
"Q": "큐",
"R": "알",
"S": "에스",
"T": "티",
"U": "유",
"V": "브이",
"W": "더블유",
"X": "엑스",
"Y": "와이",
"Z": "지",
}
def compare_sentence_with_jamo(text1, text2):
return h2j(text1) != h2j(text2)
def tokenize(text, as_id=False):
    # Split a Hangul string into initial/medial/final jamo using hangul_to_jamo from the jamo package.
text = normalize(text)
tokens = list(
hangul_to_jamo(text)
) # '존경하는' --> ['ᄌ', 'ᅩ', 'ᆫ', 'ᄀ', 'ᅧ', 'ᆼ', 'ᄒ', 'ᅡ', 'ᄂ', 'ᅳ', 'ᆫ', '~']
if as_id:
return [_symbol_to_id[token] for token in tokens]
else:
return [token for token in tokens]
def tokenizer_fn(iterator):
return (token for x in iterator for token in tokenize(x, as_id=False))
def normalize(text):
text = text.strip()
text = re.sub("\(\d+일\)", "", text)
text = re.sub("\([⺀-⺙⺛-⻳⼀-⿕々〇〡-〩〸-〺〻㐀-䶵一-鿃豈-鶴侮-頻並-龎]+\)", "", text)
text = normalize_with_dictionary(text, etc_dictionary)
text = normalize_english(text)
text = re.sub("[a-zA-Z]+", normalize_upper, text)
text = normalize_quote(text)
text = normalize_number(text)
return text
def normalize_with_dictionary(text, dic):
if any(key in text for key in dic.keys()):
pattern = re.compile("|".join(re.escape(key) for key in dic.keys()))
return pattern.sub(lambda x: dic[x.group()], text)
else:
return text
def normalize_english(text):
def fn(m):
word = m.group()
if word in english_dictionary:
return english_dictionary.get(word)
else:
return word
text = re.sub("([A-Za-z]+)", fn, text)
return text
def normalize_upper(text):
text = text.group(0)
if all([char.isupper() for char in text]):
return "".join(upper_to_kor[char] for char in text)
else:
return text
def normalize_quote(text):
def fn(found_text):
        from nltk import sent_tokenize # NLTK doesn't play well with multiprocessing
found_text = found_text.group()
unquoted_text = found_text[1:-1]
sentences = sent_tokenize(unquoted_text)
return " ".join(["'{}'".format(sent) for sent in sentences])
return re.sub(quote_checker, fn, text)
number_checker = "([+-]?\d[\d,]*)[\.]?\d*"
count_checker = "(시|명|가지|살|마리|포기|송이|수|톨|통|점|개|벌|척|채|다발|그루|자루|줄|켤레|그릇|잔|마디|상자|사람|곡|병|판)"
def normalize_number(text):
text = normalize_with_dictionary(text, unit_to_kor1)
text = normalize_with_dictionary(text, unit_to_kor2)
text = re.sub(
number_checker + count_checker, lambda x: number_to_korean(x, True), text
)
text = re.sub(number_checker, lambda x: number_to_korean(x, False), text)
return text
num_to_kor1 = [""] + list("일이삼사오육칠팔구")
num_to_kor2 = [""] + list("만억조경해")
num_to_kor3 = [""] + list("십백천")
# count_to_kor1 = [""] + ["하나","둘","셋","넷","다섯","여섯","일곱","여덟","아홉"]
count_to_kor1 = [""] + ["한", "두", "세", "네", "다섯", "여섯", "일곱", "여덟", "아홉"]
count_tenth_dict = {
"십": "열",
"두십": "스물",
"세십": "서른",
"네십": "마흔",
"다섯십": "쉰",
"여섯십": "예순",
"일곱십": "일흔",
"여덟십": "여든",
"아홉십": "아흔",
}
def number_to_korean(num_str, is_count=False):
if is_count:
num_str, unit_str = num_str.group(1), num_str.group(2)
else:
num_str, unit_str = num_str.group(), ""
num_str = num_str.replace(",", "")
num = ast.literal_eval(num_str)
if num == 0:
return "영"
check_float = num_str.split(".")
if len(check_float) == 2:
digit_str, float_str = check_float
elif len(check_float) >= 3:
raise Exception(" [!] Wrong number format")
else:
digit_str, float_str = check_float[0], None
if is_count and float_str is not None:
raise Exception(" [!] `is_count` and float number does not fit each other")
digit = int(digit_str)
if digit_str.startswith("-"):
digit, digit_str = abs(digit), str(abs(digit))
kor = ""
size = len(str(digit))
tmp = []
for i, v in enumerate(digit_str, start=1):
v = int(v)
if v != 0:
if is_count:
tmp += count_to_kor1[v]
else:
tmp += num_to_kor1[v]
tmp += num_to_kor3[(size - i) % 4]
if (size - i) % 4 == 0 and len(tmp) != 0:
kor += "".join(tmp)
tmp = []
kor += num_to_kor2[int((size - i) / 4)]
if is_count:
if kor.startswith("한") and len(kor) > 1:
kor = kor[1:]
if any(word in kor for word in count_tenth_dict):
kor = re.sub(
"|".join(count_tenth_dict.keys()),
lambda x: count_tenth_dict[x.group()],
kor,
)
if not is_count and kor.startswith("일") and len(kor) > 1:
kor = kor[1:]
if float_str is not None:
kor += "쩜 "
kor += re.sub("\d", lambda x: num_to_kor[x.group()], float_str)
if num_str.startswith("+"):
kor = "플러스 " + kor
elif num_str.startswith("-"):
kor = "마이너스 " + kor
return kor + unit_str
if __name__ == "__main__":
def test_normalize(text):
print(text)
print(normalize(text))
print("=" * 30)
test_normalize("JTBC는 JTBCs를 DY는 A가 Absolute")
test_normalize("오늘(13일) 3,600마리 강아지가")
test_normalize("60.3%")
test_normalize('"저돌"(猪突) 입니다.')
test_normalize("비대위원장이 지난 1월 이런 말을 했습니다. “난 그냥 산돼지처럼 돌파하는 스타일이다”")
test_normalize("지금은 -12.35%였고 종류는 5가지와 19가지, 그리고 55가지였다")
test_normalize("JTBC는 TH와 K 양이 2017년 9월 12일 오후 12시에 24살이 된다")
print(
list(
hangul_to_jamo(
list(hangul_to_jamo("비대위원장이 지난 1월 이런 말을 했습니다? “난 그냥 산돼지처럼 돌파하는 스타일이다”"))
)
)
)
| 26.083102
| 90
| 0.514019
|
002d6336d70435ce3f181682a33f626aa6c7772d
| 56
|
py
|
Python
|
daemons/message/__init__.py
|
alexdelorenzo/daemons
|
1fe2c9886f76f51e1e459fe97f2ac360869deb19
|
[
"Apache-2.0"
] | 50
|
2015-06-18T18:25:27.000Z
|
2022-02-08T22:29:09.000Z
|
daemons/message/__init__.py
|
alexdelorenzo/daemons
|
1fe2c9886f76f51e1e459fe97f2ac360869deb19
|
[
"Apache-2.0"
] | 9
|
2015-11-27T14:39:58.000Z
|
2021-05-21T19:18:16.000Z
|
daemons/message/__init__.py
|
alexdelorenzo/daemons
|
1fe2c9886f76f51e1e459fe97f2ac360869deb19
|
[
"Apache-2.0"
] | 16
|
2015-10-17T19:10:55.000Z
|
2021-05-20T05:48:51.000Z
|
"""Implementations of the message manager interface."""
| 28
| 55
| 0.767857
|
5ae7f2aadb60e6b18f69fc536e6e77c48f1563a6
| 34,493
|
py
|
Python
|
dafne/modeling/dafne/dafne_outputs.py
|
qilei123/DAFNe
|
6ae6c17ecef6b88e21843969e456fc83b34da0fe
|
[
"MIT"
] | 35
|
2021-09-14T03:09:34.000Z
|
2022-03-30T21:29:59.000Z
|
dafne/modeling/dafne/dafne_outputs.py
|
qilei123/DAFNe
|
6ae6c17ecef6b88e21843969e456fc83b34da0fe
|
[
"MIT"
] | 5
|
2021-11-28T07:49:36.000Z
|
2022-03-25T10:40:46.000Z
|
dafne/modeling/dafne/dafne_outputs.py
|
qilei123/DAFNe
|
6ae6c17ecef6b88e21843969e456fc83b34da0fe
|
[
"MIT"
] | 10
|
2021-09-15T12:59:56.000Z
|
2022-03-30T10:15:33.000Z
|
import logging
import torch
import torch.nn.functional as F
from fvcore.nn import sigmoid_focal_loss_jit
from torch import distributed as dist
from torch import nn
from detectron2.layers import cat
from detectron2.structures import Instances
from detectron2.structures.boxes import Boxes
from detectron2.utils.comm import get_world_size
from dafne.modeling.losses.smooth_l1 import ModulatedEightPointLoss, SmoothL1Loss
from dafne.modeling.nms.nms import ml_nms
from dafne.utils.sort_corners import sort_quadrilateral
logger = logging.getLogger(__name__)
INF = 100000000
"""
Shape shorthand in this module:
N: number of images in the minibatch
L: number of feature maps per image on which RPN is run
Hi, Wi: height and width of the i-th feature map
8: size of the box parameterization
Naming convention:
labels: refers to the ground-truth class of a position.
reg_targets: refers to the 4-d (left, top, right, bottom) distances that parameterize the ground-truth box.
logits_pred: predicted classification scores in [-inf, +inf];
reg_pred: the predicted (left, top, right, bottom), corresponding to reg_targets
ctrness_pred: predicted centerness scores
"""
def reduce_sum(tensor):
world_size = get_world_size()
if world_size < 2:
return tensor
tensor = tensor.clone()
dist.all_reduce(tensor, op=dist.ReduceOp.SUM)
return tensor
def dist_point_to_line(p1, p2, x0, y0):
"""
https://en.wikipedia.org/wiki/Distance_from_a_point_to_a_line
Line defined by P1=(x1,y1), P2=(x2,y2)
Point defined by P0=(x0, y0)
"""
x1, y1 = p1.unbind(2)
x2, y2 = p2.unbind(2)
nom = torch.abs((y2 - y1) * x0 - (x2 - x1) * y0 + x2 * y1 - y2 * x1)
denom = torch.sqrt((y2 - y1) ** 2 + (x2 - x1) ** 2)
return nom / denom
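# Illustrative sketch (not part of the original file; the helper name is hypothetical):
# a quick numeric check of the point-to-line distance above.
def _example_point_to_line_distance():
    # Distance from (0.5, 2.0) to the x-axis (the line through (0, 0) and (1, 0)) is 2.0.
    p1 = torch.tensor([[[0.0, 0.0]]])  # shape (num_locs, num_targets, 2)
    p2 = torch.tensor([[[1.0, 0.0]]])
    x0 = torch.tensor([[0.5]])
    y0 = torch.tensor([[2.0]])
    return dist_point_to_line(p1, p2, x0, y0)  # tensor([[2.]])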
def compute_abcd(corners, xs_ext, ys_ext):
num_locs = len(xs_ext)
num_targets = corners.shape[0]
corners_rep = corners[None].repeat(num_locs, 1, 1)
c0, c1, c2, c3 = corners_rep.view(num_locs, num_targets, 4, 2).unbind(2)
left = torch.stack((c0, c1, c2, c3), dim=-1)
right = torch.stack((c1, c2, c3, c0), dim=-1)
abcd = dist_point_to_line(left, right, xs_ext[..., None], ys_ext[..., None])
return abcd
def compute_ctrness_targets(reg_targets, alpha):
if len(reg_targets) == 0:
return reg_targets.new_zeros(len(reg_targets))
left_right = reg_targets[:, [0, 2]]
top_bottom = reg_targets[:, [1, 3]]
ctrness = (left_right.min(dim=-1)[0] / left_right.max(dim=-1)[0]) * (
top_bottom.min(dim=-1)[0] / top_bottom.max(dim=-1)[0]
)
ctrness = ctrness ** (1 / alpha)
# Set critical cases where the ctrness computation was not possible to zero
ctrness[torch.isnan(ctrness)] = 0.0
return ctrness
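# Illustrative sketch (not part of the original file; the helper name is hypothetical):
# centerness is 1.0 for a location at the box centre (equal left/right and top/bottom
# distances) and shrinks towards 0 near a corner.
def _example_ctrness_targets():
    reg_targets = torch.tensor([[5.0, 5.0, 5.0, 5.0],   # centre of a 10x10 box
                                [1.0, 1.0, 9.0, 9.0]])  # close to the top-left corner
    return compute_ctrness_targets(reg_targets, alpha=1.0)  # approx. tensor([1.0000, 0.0123])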
def _cross2d(x, y):
"""Cross product in 2D."""
return x[:, :, 0] * y[:, :, 1] - x[:, :, 1] * y[:, :, 0]
def area_triangle(a, b, c):
"""Area of a triangle"""
x = a - c
y = b - c
crs = 1 / 2 * torch.abs(_cross2d(x, y))
return crs
def is_in_quadrilateral(c0, c1, c2, c3, poly_area, loc):
"""Check if loc is in the given quadrilateral.
Assumes that the quadrilateral is sorted."""
# Compute area between edges and loc
a = area_triangle(c0, c1, loc)
b = area_triangle(c1, c2, loc)
c = area_triangle(c2, c3, loc)
d = area_triangle(c3, c0, loc)
sum_area_to_loc = a + b + c + d
return ~(sum_area_to_loc > (poly_area + 1e-3)) # 1e-3 is some epsilon to avoid equality
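# Illustrative sketch (not part of the original file; the helper name is hypothetical):
# the triangle-area containment test above on a unit square, mirroring how the training
# code calls it.
def _example_in_quadrilateral():
    corners = torch.tensor([[0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0]])  # one sorted box, (1, 8)
    locations = torch.tensor([[0.5, 0.5], [2.0, 2.0]])                  # one inside, one outside
    corners_rep = corners[None].repeat(len(locations), 1, 1)            # (2, 1, 8)
    poly_area = torch.ones(len(locations), 1)                           # unit square area
    return is_in_quadrilateral(
        corners_rep[..., 0:2],
        corners_rep[..., 2:4],
        corners_rep[..., 4:6],
        corners_rep[..., 6:8],
        poly_area,
        locations[:, None],
    )  # tensor([[ True], [False]])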
class DAFNeOutputs(nn.Module):
def __init__(self, cfg):
super(DAFNeOutputs, self).__init__()
self.cfg = cfg
self.focal_loss_alpha = cfg.MODEL.DAFNE.LOSS_ALPHA
self.focal_loss_gamma = cfg.MODEL.DAFNE.LOSS_GAMMA
self.center_sample = cfg.MODEL.DAFNE.CENTER_SAMPLE
self.radius = cfg.MODEL.DAFNE.POS_RADIUS
self.pre_nms_thresh_train = cfg.MODEL.DAFNE.INFERENCE_TH_TRAIN
self.pre_nms_topk_train = cfg.MODEL.DAFNE.PRE_NMS_TOPK_TRAIN
self.post_nms_topk_train = cfg.MODEL.DAFNE.POST_NMS_TOPK_TRAIN
self.sort_corners = cfg.MODEL.DAFNE.SORT_CORNERS
logspace = cfg.MODEL.DAFNE.ENABLE_LOSS_LOG
beta = cfg.MODEL.DAFNE.LOSS_SMOOTH_L1_BETA
if cfg.MODEL.DAFNE.ENABLE_LOSS_MODULATION:
self.corners_loss_func = ModulatedEightPointLoss(
beta=beta,
reduction="sum",
logspace=logspace,
)
else:
self.corners_loss_func = SmoothL1Loss(
beta=beta,
reduction="sum",
logspace=logspace,
)
self.center_loss_func = SmoothL1Loss(
beta=beta,
reduction="sum",
logspace=logspace,
)
self.pre_nms_thresh_test = cfg.MODEL.DAFNE.INFERENCE_TH_TEST
self.pre_nms_topk_test = cfg.MODEL.DAFNE.PRE_NMS_TOPK_TEST
self.post_nms_topk_test = cfg.MODEL.DAFNE.POST_NMS_TOPK_TEST
self.nms_thresh = cfg.MODEL.DAFNE.NMS_TH
self.thresh_with_ctr = cfg.MODEL.DAFNE.THRESH_WITH_CTR
self.centerness_mode = cfg.MODEL.DAFNE.CENTERNESS
self.centerness_alpha = cfg.MODEL.DAFNE.CENTERNESS_ALPHA
self.has_centerness = self.centerness_mode != "none"
assert self.centerness_mode in ["none", "plain", "oriented"]
self.corner_prediction_strategy = cfg.MODEL.DAFNE.CORNER_PREDICTION
self.has_center_reg = self.corner_prediction_strategy == "center-to-corner"
self.num_classes = cfg.MODEL.DAFNE.NUM_CLASSES
self.strides = cfg.MODEL.DAFNE.FPN_STRIDES
# Lambdas
self.lambda_cls = cfg.MODEL.DAFNE.LOSS_LAMBDA.CLS
self.lambda_ctr = cfg.MODEL.DAFNE.LOSS_LAMBDA.CTR
self.lambda_corners = cfg.MODEL.DAFNE.LOSS_LAMBDA.CORNERS
self.lambda_center = cfg.MODEL.DAFNE.LOSS_LAMBDA.CENTER
self.lambda_ltrb = cfg.MODEL.DAFNE.LOSS_LAMBDA.LTRB
lambda_normalize = cfg.MODEL.DAFNE.LOSS_LAMBDA_NORM
if lambda_normalize:
self.normalize_lambdas()
# generate sizes of interest
soi = []
prev_size = -1
for s in cfg.MODEL.DAFNE.SIZES_OF_INTEREST:
soi.append([prev_size, s])
prev_size = s
soi.append([prev_size, INF])
self.sizes_of_interest = soi
def normalize_lambdas(self):
# Make them sum up to one
lambda_sum = self.lambda_cls + self.lambda_corners
if self.has_centerness:
lambda_sum += self.lambda_ctr
if self.has_center_reg:
lambda_sum += self.lambda_center
self.lambda_cls = self.lambda_cls / lambda_sum
self.lambda_ctr = self.lambda_ctr / lambda_sum
self.lambda_corners = self.lambda_corners / lambda_sum
self.lambda_center = self.lambda_center / lambda_sum
self.lambda_ltrb = self.lambda_ltrb / lambda_sum
def update_lambdas(
self,
lambda_cls=None,
lambda_ctr=None,
lambda_corners=None,
lambda_center=None,
normalize=False,
):
if lambda_cls is not None:
self.lambda_cls = lambda_cls
else:
self.lambda_cls = self.cfg.MODEL.DAFNE.LOSS_LAMBDA.CLS
if lambda_ctr is not None:
self.lambda_ctr = lambda_ctr
else:
self.lambda_ctr = self.cfg.MODEL.DAFNE.LOSS_LAMBDA.CTR
if lambda_corners is not None:
self.lambda_corners = lambda_corners
else:
self.lambda_corners = self.cfg.MODEL.DAFNE.LOSS_LAMBDA.CORNERS
if lambda_center is not None:
self.lambda_center = lambda_center
else:
self.lambda_center = self.cfg.MODEL.DAFNE.LOSS_LAMBDA.CENTER
if normalize:
self.normalize_lambdas()
def _transpose(self, training_targets, num_loc_list):
"""
Transpose image-first training targets to level-first ones
:return: level-first training targets
"""
for im_i in range(len(training_targets)):
training_targets[im_i] = torch.split(training_targets[im_i], num_loc_list, dim=0)
targets_level_first = []
for targets_per_level in zip(*training_targets):
targets_level_first.append(torch.cat(targets_per_level, dim=0))
return targets_level_first
def _get_ground_truth(self, locations, gt_instances):
num_loc_list = [len(loc) for loc in locations]
# compute locations to size ranges
loc_to_size_range = []
for l, loc_per_level in enumerate(locations):
loc_to_size_range_per_level = loc_per_level.new_tensor(self.sizes_of_interest[l])
loc_to_size_range.append(loc_to_size_range_per_level[None].expand(num_loc_list[l], -1))
loc_to_size_range = torch.cat(loc_to_size_range, dim=0)
locations = torch.cat(locations, dim=0)
training_targets = self.compute_targets_for_locations(
locations, gt_instances, loc_to_size_range, num_loc_list
)
training_targets["locations"] = [locations.clone() for _ in range(len(gt_instances))]
training_targets["im_inds"] = [
locations.new_ones(locations.size(0), dtype=torch.long) * i
for i in range(len(gt_instances))
]
# transpose im first training_targets to level first ones
training_targets = {
k: self._transpose(v, num_loc_list) for k, v in training_targets.items()
}
training_targets["fpn_levels"] = [
loc.new_ones(len(loc), dtype=torch.long) * level
for level, loc in enumerate(training_targets["locations"])
]
# we normalize reg_targets by FPN's strides here
reg_targets_corners = training_targets["reg_targets_corners"]
reg_targets_ltrb = training_targets["reg_targets_ltrb"]
reg_targets_abcd = training_targets["reg_targets_abcd"]
if self.cfg.MODEL.DAFNE.ENABLE_FPN_STRIDE_NORM:
for l in range(len(reg_targets_corners)):
reg_targets_corners[l] = reg_targets_corners[l] / float(self.strides[l])
reg_targets_ltrb[l] = reg_targets_ltrb[l] / float(self.strides[l])
reg_targets_abcd[l] = reg_targets_abcd[l] / float(self.strides[l])
return training_targets
def get_sample_region(
self, boxes, strides, num_loc_list, loc_xs, loc_ys, bitmasks=None, radius=1
):
if bitmasks is not None:
_, h, w = bitmasks.size()
ys = torch.arange(0, h, dtype=torch.float32, device=bitmasks.device)
xs = torch.arange(0, w, dtype=torch.float32, device=bitmasks.device)
m00 = bitmasks.sum(dim=-1).sum(dim=-1).clamp(min=1e-6)
m10 = (bitmasks * xs).sum(dim=-1).sum(dim=-1)
m01 = (bitmasks * ys[:, None]).sum(dim=-1).sum(dim=-1)
center_x = m10 / m00
center_y = m01 / m00
else:
center_x = boxes[..., [0, 2]].sum(dim=-1) * 0.5
center_y = boxes[..., [1, 3]].sum(dim=-1) * 0.5
num_gts = boxes.shape[0]
K = len(loc_xs)
boxes = boxes[None].expand(K, num_gts, 4)
center_x = center_x[None].expand(K, num_gts)
center_y = center_y[None].expand(K, num_gts)
center_gt = boxes.new_zeros(boxes.shape)
# no gt
if center_x.numel() == 0 or center_x[..., 0].sum() == 0:
return loc_xs.new_zeros(loc_xs.shape, dtype=torch.uint8)
beg = 0
for level, num_loc in enumerate(num_loc_list):
end = beg + num_loc
stride = strides[level] * radius
xmin = center_x[beg:end] - stride
ymin = center_y[beg:end] - stride
xmax = center_x[beg:end] + stride
ymax = center_y[beg:end] + stride
# limit sample region in gt
center_gt[beg:end, :, 0] = torch.where(
xmin > boxes[beg:end, :, 0], xmin, boxes[beg:end, :, 0]
)
center_gt[beg:end, :, 1] = torch.where(
ymin > boxes[beg:end, :, 1], ymin, boxes[beg:end, :, 1]
)
center_gt[beg:end, :, 2] = torch.where(
xmax > boxes[beg:end, :, 2], boxes[beg:end, :, 2], xmax
)
center_gt[beg:end, :, 3] = torch.where(
ymax > boxes[beg:end, :, 3], boxes[beg:end, :, 3], ymax
)
beg = end
left = loc_xs[:, None] - center_gt[..., 0]
right = center_gt[..., 2] - loc_xs[:, None]
top = loc_ys[:, None] - center_gt[..., 1]
bottom = center_gt[..., 3] - loc_ys[:, None]
center_bbox = torch.stack((left, top, right, bottom), -1)
inside_gt_bbox_mask = center_bbox.min(-1)[0] > 0
return inside_gt_bbox_mask
def compute_targets_for_locations(self, locations, targets, size_ranges, num_loc_list):
labels = []
reg_targets_corners = []
reg_targets_ltrb = []
reg_targets_abcd = []
target_inds = []
xs, ys = locations[:, 0], locations[:, 1]
K = len(xs)
num_targets = 0
for im_i in range(len(targets)):
targets_per_im = targets[im_i]
bboxes = targets_per_im.gt_boxes.tensor
num_gts = bboxes.shape[0]
corners = targets_per_im.gt_corners
area = targets_per_im.gt_corners_area
labels_per_im = targets_per_im.gt_classes
locations_to_gt_area = area[None].repeat(K, 1)
# no gt
if bboxes.numel() == 0:
labels.append(labels_per_im.new_zeros(locations.size(0)) + self.num_classes)
reg_targets_ltrb.append(locations.new_zeros((locations.size(0), 4)))
reg_targets_abcd.append(locations.new_zeros((locations.size(0), 4)))
reg_targets_corners.append(locations.new_zeros((locations.size(0), 8)))
target_inds.append(labels_per_im.new_zeros(locations.size(0)) - 1)
continue
xs_ext = xs[:, None]
ys_ext = ys[:, None]
# Generate ltrb values
l = xs_ext - bboxes[:, 0][None]
t = ys_ext - bboxes[:, 1][None]
r = bboxes[:, 2][None] - xs_ext
b = bboxes[:, 3][None] - ys_ext
reg_targets_ltrb_per_im = torch.stack([l, t, r, b], dim=2)
reg_targets_abcd_per_im = compute_abcd(corners, xs_ext, ys_ext)
# Compute corner w.r.t. locations (expand for each location)
x0_centered = corners[:, 0][None] - xs_ext
y0_centered = corners[:, 1][None] - ys_ext
x1_centered = corners[:, 2][None] - xs_ext
y1_centered = corners[:, 3][None] - ys_ext
x2_centered = corners[:, 4][None] - xs_ext
y2_centered = corners[:, 5][None] - ys_ext
x3_centered = corners[:, 6][None] - xs_ext
y3_centered = corners[:, 7][None] - ys_ext
reg_targets_corners_per_im = torch.stack(
[
x0_centered,
y0_centered,
x1_centered,
y1_centered,
x2_centered,
y2_centered,
x3_centered,
y3_centered,
],
dim=2,
)
if self.center_sample:
if targets_per_im.has("gt_bitmasks_full"):
bitmasks = targets_per_im.gt_bitmasks_full
else:
bitmasks = None
is_in_boxes_center_sampling = self.get_sample_region(
bboxes,
self.strides,
num_loc_list,
xs,
ys,
bitmasks=bitmasks,
radius=self.radius,
)
else:
is_in_boxes_center_sampling = reg_targets_ltrb_per_im.min(dim=2)[0] > 0
# is_in_boxes = is_in_boxes_ltrb
if self.cfg.MODEL.DAFNE.CENTER_SAMPLE_ONLY:
# Only use center sampling
is_in_boxes = is_in_boxes_center_sampling
else:
# IS_IN_BOXES for quadrilateral
corners_rep = corners[None].repeat(K, 1, 1)
is_in_boxes_quad = is_in_quadrilateral(
corners_rep[..., 0:2],
corners_rep[..., 2:4],
corners_rep[..., 4:6],
corners_rep[..., 6:8],
locations_to_gt_area,
locations[:, None],
)
# Combine center_sampling + is_in_quadrilateral with logical and
if self.cfg.MODEL.DAFNE.COMBINE_CENTER_SAMPLE:
is_in_boxes = is_in_boxes_center_sampling & is_in_boxes_quad
else:
# Only use box-check sampling
is_in_boxes = is_in_boxes_quad
max_reg_targets_per_im = reg_targets_ltrb_per_im.max(dim=2)[0]
# limit the regression range for each location
is_cared_in_the_level = (max_reg_targets_per_im >= size_ranges[:, [0]]) & (
max_reg_targets_per_im <= size_ranges[:, [1]]
)
if self.cfg.MODEL.DAFNE.ENABLE_IN_BOX_CHECK:
locations_to_gt_area[is_in_boxes == 0] = INF
if self.cfg.MODEL.DAFNE.ENABLE_LEVEL_SIZE_FILTERING:
locations_to_gt_area[is_cared_in_the_level == 0] = INF
# if there are still more than one objects for a location,
# we choose the one with minimal area
locations_to_min_area, locations_to_gt_inds = locations_to_gt_area.min(dim=1)
reg_targets_ltrb_per_im = reg_targets_ltrb_per_im[
range(len(locations)), locations_to_gt_inds
]
reg_targets_abcd_per_im = reg_targets_abcd_per_im[
range(len(locations)), locations_to_gt_inds
]
reg_targets_corners_per_im = reg_targets_corners_per_im[
range(len(locations)), locations_to_gt_inds
]
target_inds_per_im = locations_to_gt_inds + num_targets
num_targets += len(targets_per_im)
labels_per_im = labels_per_im[locations_to_gt_inds]
labels_per_im[locations_to_min_area == INF] = self.num_classes
labels.append(labels_per_im)
reg_targets_ltrb.append(reg_targets_ltrb_per_im)
reg_targets_abcd.append(reg_targets_abcd_per_im)
reg_targets_corners.append(reg_targets_corners_per_im)
target_inds.append(target_inds_per_im)
return {
"labels": labels,
"reg_targets_ltrb": reg_targets_ltrb,
"reg_targets_abcd": reg_targets_abcd,
"reg_targets_corners": reg_targets_corners,
"target_inds": target_inds,
}
def losses(
self,
logits_pred,
corners_reg_pred,
center_reg_pred,
ltrb_reg_pred,
ctrness_pred,
locations,
gt_instances,
top_feats=None,
):
"""
Return the losses from a set of DAFNE predictions and their associated ground-truth.
Returns:
dict[loss name -> loss value]: A dict mapping from loss name to loss value.
"""
training_targets = self._get_ground_truth(locations, gt_instances)
# Collect all logits and regression predictions over feature maps
# and images to arrive at the same shape as the labels and targets
# The final ordering is L, N, H, W from slowest to fastest axis.
instances = Instances((0, 0))
instances.labels = cat(
[
# Reshape: (N, 1, Hi, Wi) -> (N*Hi*Wi,)
x.reshape(-1)
for x in training_targets["labels"]
],
dim=0,
)
instances.gt_inds = cat(
[
# Reshape: (N, 1, Hi, Wi) -> (N*Hi*Wi,)
x.reshape(-1)
for x in training_targets["target_inds"]
],
dim=0,
)
instances.im_inds = cat([x.reshape(-1) for x in training_targets["im_inds"]], dim=0)
instances.reg_targets_corners = cat(
[
# Reshape: (N, Hi, Wi, 8) -> (N*Hi*Wi, 8)
x.reshape(-1, 8)
for x in training_targets["reg_targets_corners"]
],
dim=0,
)
instances.reg_targets_ltrb = cat(
[
# Reshape: (N, Hi, Wi, 4) -> (N*Hi*Wi, 4)
x.reshape(-1, 4)
for x in training_targets["reg_targets_ltrb"]
],
dim=0,
)
instances.reg_targets_abcd = cat(
[
# Reshape: (N, Hi, Wi, 4) -> (N*Hi*Wi, 4)
x.reshape(-1, 4)
for x in training_targets["reg_targets_abcd"]
],
dim=0,
)
instances.locations = cat([x.reshape(-1, 2) for x in training_targets["locations"]], dim=0)
instances.fpn_levels = cat([x.reshape(-1) for x in training_targets["fpn_levels"]], dim=0)
instances.logits_pred = cat(
[
# Reshape: (N, C, Hi, Wi) -> (N, Hi, Wi, C) -> (N*Hi*Wi, C)
x.permute(0, 2, 3, 1).reshape(-1, self.num_classes)
for x in logits_pred
],
dim=0,
)
instances.corners_reg_pred = cat(
[
# Reshape: (N, B, Hi, Wi) -> (N, Hi, Wi, B) -> (N*Hi*Wi, B)
x.permute(0, 2, 3, 1).reshape(-1, 8)
for x in corners_reg_pred
],
dim=0,
)
if self.has_center_reg:
instances.center_reg_pred = cat(
[x.permute(0, 2, 3, 1).reshape(-1, 2) for x in center_reg_pred],
dim=0,
)
if self.has_centerness:
instances.ctrness_pred = cat(
[
# Reshape: (N, 1, Hi, Wi) -> (N*Hi*Wi,)
x.permute(0, 2, 3, 1).reshape(-1)
for x in ctrness_pred
],
dim=0,
)
if len(top_feats) > 0:
instances.top_feats = cat(
[
# Reshape: (N, -1, Hi, Wi) -> (N*Hi*Wi, -1)
x.permute(0, 2, 3, 1).reshape(-1, x.size(1))
for x in top_feats
],
dim=0,
)
return self.dafne_losses(instances)
def dafne_losses(self, instances):
num_classes = instances.logits_pred.size(1)
assert num_classes == self.num_classes
labels = instances.labels.flatten()
pos_inds = torch.nonzero(labels != num_classes).squeeze(1)
num_pos_local = pos_inds.numel()
num_gpus = get_world_size()
total_num_pos = reduce_sum(pos_inds.new_tensor([num_pos_local])).item()
num_pos_avg = max(total_num_pos / num_gpus, 1.0)
# prepare one_hot
class_target = torch.zeros_like(instances.logits_pred)
class_target[pos_inds, labels[pos_inds]] = 1
class_loss = (
sigmoid_focal_loss_jit(
instances.logits_pred,
class_target,
alpha=self.focal_loss_alpha,
gamma=self.focal_loss_gamma,
reduction="sum",
)
/ num_pos_avg
)
instances = instances[pos_inds]
instances.pos_inds = pos_inds
if self.centerness_mode == "oriented":
ctrness_targets = compute_ctrness_targets(
instances.reg_targets_abcd, self.centerness_alpha
)
elif self.centerness_mode == "plain":
ctrness_targets = compute_ctrness_targets(
instances.reg_targets_ltrb, self.centerness_alpha
)
else:
ctrness_targets = compute_ctrness_targets(
instances.reg_targets_abcd, self.centerness_alpha
)
ctrness_targets[:] = 1.0
ctrness_targets_sum = ctrness_targets.sum()
loss_denorm = max(reduce_sum(ctrness_targets_sum).item() / num_gpus, 1e-6)
instances.gt_ctrs = ctrness_targets
if pos_inds.numel() > 0:
# Sort corners if flag is set
# NOTE: targets are sorted in the datasetmapper
if self.sort_corners:
instances.corners_reg_pred = sort_quadrilateral(instances.corners_reg_pred)
corners_reg_loss = (
self.corners_loss_func(
instances.corners_reg_pred,
instances.reg_targets_corners,
ctrness_targets,
)
/ loss_denorm
)
reg_targets_center = instances.reg_targets_corners.view(-1, 4, 2).mean(1)
if self.has_center_reg:
center_reg_loss = (
self.center_loss_func(
instances.center_reg_pred,
reg_targets_center,
ctrness_targets,
)
/ loss_denorm
)
if self.has_centerness:
ctrness_loss = (
F.binary_cross_entropy_with_logits(
instances.ctrness_pred, ctrness_targets, reduction="sum"
)
/ num_pos_avg
)
else:
corners_reg_loss = instances.corners_reg_pred.sum() * 0
if self.has_center_reg:
center_reg_loss = instances.center_reg_pred.sum() * 0
if self.has_centerness:
ctrness_loss = instances.ctrness_pred.sum() * 0
# Apply lambdas
class_loss = class_loss * self.lambda_cls
corners_reg_loss = corners_reg_loss * self.lambda_corners
losses = {
"loss/cls": class_loss,
"loss/corners": corners_reg_loss,
}
# Add center reg
if self.has_center_reg:
losses["loss/center"] = center_reg_loss * self.lambda_center
# Add centerness if not none
if self.has_centerness:
losses["loss/ctr"] = ctrness_loss * self.lambda_ctr
extras = {"instances": instances, "loss_denorm": loss_denorm}
return extras, losses
def predict_proposals(
self,
logits_pred,
corners_reg_pred,
ctrness_pred,
locations,
image_sizes,
top_feats=None,
):
if self.training:
self.pre_nms_thresh = self.pre_nms_thresh_train
self.pre_nms_topk = self.pre_nms_topk_train
self.post_nms_topk = self.post_nms_topk_train
else:
self.pre_nms_thresh = self.pre_nms_thresh_test
self.pre_nms_topk = self.pre_nms_topk_test
self.post_nms_topk = self.post_nms_topk_test
sampled_boxes = []
bundle = {
"l": locations,
"o": logits_pred,
"rc": corners_reg_pred,
"c": ctrness_pred,
"s": self.strides,
}
if len(top_feats) > 0:
bundle["t"] = top_feats
for i, per_bundle in enumerate(zip(*bundle.values())):
# get per-level bundle
per_bundle = dict(zip(bundle.keys(), per_bundle))
# recall that during training, we normalize regression targets with FPN's stride.
# we denormalize them here.
l = per_bundle["l"]
o = per_bundle["o"]
if self.cfg.MODEL.DAFNE.ENABLE_FPN_STRIDE_NORM:
rc = per_bundle["rc"] * per_bundle["s"]
else:
rc = per_bundle["rc"]
c = per_bundle["c"]
t = per_bundle["t"] if "t" in bundle else None
sampled_boxes.append(self.forward_for_single_feature_map(l, o, rc, c, image_sizes, t))
for per_im_sampled_boxes in sampled_boxes[-1]:
per_im_sampled_boxes.fpn_levels = (
l.new_ones(len(per_im_sampled_boxes), dtype=torch.long) * i
)
boxlists = list(zip(*sampled_boxes))
boxlists = [Instances.cat(boxlist) for boxlist in boxlists]
boxlists = self.select_over_all_levels(boxlists)
return boxlists
def forward_for_single_feature_map(
self,
locations,
logits_pred,
corners_reg_pred,
ctrness_pred,
image_sizes,
top_feat=None,
):
N, C, H, W = logits_pred.shape
# put in the same format as locations
logits_pred = logits_pred.view(N, C, H, W).permute(0, 2, 3, 1)
cls_pred = logits_pred.reshape(N, -1, C).sigmoid()
box_regression_corners = corners_reg_pred.view(N, 8, H, W).permute(0, 2, 3, 1)
box_regression_corners = box_regression_corners.reshape(N, -1, 8)
ctrness_pred = ctrness_pred.view(N, 1, H, W).permute(0, 2, 3, 1)
ctrness_pred = ctrness_pred.reshape(N, -1)
# Only apply sigmoid if centerness is enabled, else keep dummy "1.0" values
if self.has_centerness:
ctrness_pred = ctrness_pred.sigmoid()
if top_feat is not None:
top_feat = top_feat.view(N, -1, H, W).permute(0, 2, 3, 1)
top_feat = top_feat.reshape(N, H * W, -1)
# if self.thresh_with_ctr is True, we multiply the classification
# scores with centerness scores before applying the threshold.
if self.has_centerness and self.thresh_with_ctr:
cls_pred = torch.sqrt(cls_pred * ctrness_pred[:, :, None])
candidate_inds = cls_pred > self.pre_nms_thresh
pre_nms_top_n = candidate_inds.reshape(N, -1).sum(1)
pre_nms_top_n = pre_nms_top_n.clamp(max=self.pre_nms_topk)
if self.has_centerness and not self.thresh_with_ctr:
cls_pred = torch.sqrt(cls_pred * ctrness_pred[:, :, None])
results = []
for i in range(N):
per_box_cls = cls_pred[i]
per_candidate_inds = candidate_inds[i]
per_box_cls = per_box_cls[per_candidate_inds]
per_candidate_nonzeros = per_candidate_inds.nonzero()
per_box_loc = per_candidate_nonzeros[:, 0]
per_class = per_candidate_nonzeros[:, 1]
per_box_regression_corners = box_regression_corners[i]
per_box_regression_corners = per_box_regression_corners[per_box_loc]
per_locations = locations[per_box_loc]
per_box_centerness = ctrness_pred[i, per_box_loc]
if top_feat is not None:
per_top_feat = top_feat[i]
per_top_feat = per_top_feat[per_box_loc]
per_pre_nms_top_n = pre_nms_top_n[i]
if per_candidate_inds.sum().item() > per_pre_nms_top_n.item():
per_box_cls, top_k_indices = per_box_cls.topk(per_pre_nms_top_n, sorted=False)
per_class = per_class[top_k_indices]
per_box_regression_corners = per_box_regression_corners[top_k_indices]
per_locations = per_locations[top_k_indices]
per_box_centerness = per_box_centerness[top_k_indices]
if top_feat is not None:
per_top_feat = per_top_feat[top_k_indices]
detections_poly = torch.stack(
[
per_locations[:, 0] + per_box_regression_corners[:, 0],
per_locations[:, 1] + per_box_regression_corners[:, 1],
per_locations[:, 0] + per_box_regression_corners[:, 2],
per_locations[:, 1] + per_box_regression_corners[:, 3],
per_locations[:, 0] + per_box_regression_corners[:, 4],
per_locations[:, 1] + per_box_regression_corners[:, 5],
per_locations[:, 0] + per_box_regression_corners[:, 6],
per_locations[:, 1] + per_box_regression_corners[:, 7],
],
dim=1,
)
# Sort quadrilateral to have a canonical representation
if self.sort_corners:
detections_poly = sort_quadrilateral(detections_poly)
if type(image_sizes[i]) == torch.Tensor:
image_size = tuple(image_sizes[i].tolist())
else:
image_size = image_sizes[i]
boxlist = Instances(image_size)
# Generate surrounding hboxes from corners
if detections_poly.shape[0] > 0:
xmin = torch.min(detections_poly[:, 0::2], dim=1).values
xmax = torch.max(detections_poly[:, 0::2], dim=1).values
ymin = torch.min(detections_poly[:, 1::2], dim=1).values
ymax = torch.max(detections_poly[:, 1::2], dim=1).values
hbboxes = torch.stack((xmin, ymin, xmax, ymax), dim=1)
else:
hbboxes = detections_poly.new_empty(0, 4)
boxlist.pred_boxes = Boxes(hbboxes)
boxlist.pred_corners = detections_poly
boxlist.scores = per_box_cls
boxlist.centerness = per_box_centerness
# boxlist.scores = torch.sqrt(per_box_cls)
boxlist.pred_classes = per_class
boxlist.locations = per_locations
if top_feat is not None:
boxlist.top_feat = per_top_feat
results.append(boxlist)
return results
def select_over_all_levels(self, boxlists):
num_images = len(boxlists)
results = []
for i in range(num_images):
# multiclass nms
result = ml_nms(boxlists[i], self.nms_thresh)
number_of_detections = len(result)
# Limit to max_per_image detections **over all classes**
if number_of_detections > self.post_nms_topk > 0:
cls_scores = result.scores
image_thresh, _ = torch.kthvalue(
cls_scores.cpu(), number_of_detections - self.post_nms_topk + 1
)
keep = cls_scores >= image_thresh.item()
keep = torch.nonzero(keep).squeeze(1)
result = result[keep]
results.append(result)
return results
| 37.24946
| 111
| 0.578146
|
eb3df10882b8834c9ebc8627bce6c2adff47c387
| 9,665
|
py
|
Python
|
pimdm/rwlock/RWLock.py
|
leoplo/pim_dm
|
e097fb8e247b14f142b6aa97d8ee34440aeba806
|
[
"MIT"
] | 6
|
2020-02-04T20:59:59.000Z
|
2021-11-24T09:56:07.000Z
|
pimdm/rwlock/RWLock.py
|
leoplo/pim_dm
|
e097fb8e247b14f142b6aa97d8ee34440aeba806
|
[
"MIT"
] | 4
|
2020-04-10T14:51:39.000Z
|
2022-02-14T00:59:21.000Z
|
pimdm/rwlock/RWLock.py
|
leoplo/pim_dm
|
e097fb8e247b14f142b6aa97d8ee34440aeba806
|
[
"MIT"
] | 3
|
2020-08-13T17:56:35.000Z
|
2021-11-24T11:03:12.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Read Write Lock
"""
import threading
import time
class RWLockRead(object):
"""
A Read/Write lock giving preference to Reader
"""
def __init__(self):
self.V_ReadCount = 0
self.A_Resource = threading.Lock()
self.A_LockReadCount = threading.Lock()
class _aReader(object):
def __init__(self, p_RWLock):
self.A_RWLock = p_RWLock
self.V_Locked = False
def acquire(self, blocking=1, timeout=-1):
p_TimeOut = None if (blocking and timeout < 0) else (timeout if blocking else 0)
c_DeadLine = None if p_TimeOut is None else (time.time() + p_TimeOut)
if not self.A_RWLock.A_LockReadCount.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
return False
self.A_RWLock.V_ReadCount += 1
if self.A_RWLock.V_ReadCount == 1:
if not self.A_RWLock.A_Resource.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
self.A_RWLock.V_ReadCount -= 1
self.A_RWLock.A_LockReadCount.release()
return False
self.A_RWLock.A_LockReadCount.release()
self.V_Locked = True
return True
def release(self):
if not self.V_Locked: raise RuntimeError("cannot release un-acquired lock")
self.V_Locked = False
self.A_RWLock.A_LockReadCount.acquire()
self.A_RWLock.V_ReadCount -= 1
if self.A_RWLock.V_ReadCount == 0:
self.A_RWLock.A_Resource.release()
self.A_RWLock.A_LockReadCount.release()
def locked(self):
return self.V_Locked
def __enter__(self):
self.acquire()
def __exit__(self, p_Type, p_Value, p_Traceback):
self.release()
class _aWriter(object):
def __init__(self, p_RWLock):
self.A_RWLock = p_RWLock
self.V_Locked = False
def acquire(self, blocking=1, timeout=-1):
self.V_Locked = self.A_RWLock.A_Resource.acquire(blocking, timeout)
return self.V_Locked
def release(self):
if not self.V_Locked: raise RuntimeError("cannot release un-acquired lock")
self.V_Locked = False
self.A_RWLock.A_Resource.release()
def locked(self):
return self.V_Locked
def __enter__(self):
self.acquire()
def __exit__(self, p_Type, p_Value, p_Traceback):
self.release()
def genRlock(self):
"""
Generate a reader lock
"""
return RWLockRead._aReader(self)
def genWlock(self):
"""
Generate a writer lock
"""
return RWLockRead._aWriter(self)
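# Illustrative usage sketch (not part of the original module; the helper name is
# hypothetical). The generated reader/writer objects work as context managers, so the
# usual pattern is:
def _example_rwlock_read_usage():
    rw_lock = RWLockRead()
    read_lock = rw_lock.genRlock()
    write_lock = rw_lock.genWlock()
    with read_lock:        # several readers may hold this at the same time
        value = 42
    with write_lock:       # a writer gets exclusive access to the resource
        value = 43
    return value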
class RWLockWrite(object):
"""
A Read/Write lock giving preference to Writer
"""
def __init__(self):
self.V_ReadCount = 0
self.V_WriteCount = 0
self.A_LockReadCount = threading.Lock()
self.A_LockWriteCount = threading.Lock()
self.A_LockReadEntry = threading.Lock()
self.A_LockReadTry = threading.Lock()
self.A_Resource = threading.Lock()
class _aReader(object):
def __init__(self, p_RWLock):
self.A_RWLock = p_RWLock
self.V_Locked = False
def acquire(self, blocking=1, timeout=-1):
p_TimeOut = None if (blocking and timeout < 0) else (timeout if blocking else 0)
c_DeadLine = None if p_TimeOut is None else (time.time() + p_TimeOut)
if not self.A_RWLock.A_LockReadEntry.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
return False
if not self.A_RWLock.A_LockReadTry.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
self.A_RWLock.A_LockReadEntry.release()
return False
if not self.A_RWLock.A_LockReadCount.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
self.A_RWLock.A_LockReadTry.release()
self.A_RWLock.A_LockReadEntry.release()
return False
self.A_RWLock.V_ReadCount += 1
if (self.A_RWLock.V_ReadCount == 1):
if not self.A_RWLock.A_Resource.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
self.A_RWLock.A_LockReadTry.release()
self.A_RWLock.A_LockReadEntry.release()
self.A_RWLock.V_ReadCount -= 1
self.A_RWLock.A_LockReadCount.release()
return False
self.A_RWLock.A_LockReadCount.release()
self.A_RWLock.A_LockReadTry.release()
self.A_RWLock.A_LockReadEntry.release()
self.V_Locked = True
return True
def release(self):
if not self.V_Locked: raise RuntimeError("cannot release un-acquired lock")
self.V_Locked = False
self.A_RWLock.A_LockReadCount.acquire()
self.A_RWLock.V_ReadCount -= 1
if (self.A_RWLock.V_ReadCount == 0):
self.A_RWLock.A_Resource.release()
self.A_RWLock.A_LockReadCount.release()
def locked(self):
return self.V_Locked
def __enter__(self):
self.acquire()
def __exit__(self, p_Type, p_Value, p_Traceback):
self.release()
class _aWriter(object):
def __init__(self, p_RWLock):
self.A_RWLock = p_RWLock
self.V_Locked = False
def acquire(self, blocking=1, timeout=-1):
p_TimeOut = None if (blocking and timeout < 0) else (timeout if blocking else 0)
c_DeadLine = None if p_TimeOut is None else (time.time() + p_TimeOut)
if not self.A_RWLock.A_LockWriteCount.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
return False
self.A_RWLock.V_WriteCount += 1
if (self.A_RWLock.V_WriteCount == 1):
if not self.A_RWLock.A_LockReadTry.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
self.A_RWLock.V_WriteCount -= 1
self.A_RWLock.A_LockWriteCount.release()
return False
self.A_RWLock.A_LockWriteCount.release()
if not self.A_RWLock.A_Resource.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
self.A_RWLock.A_LockWriteCount.acquire()
self.A_RWLock.V_WriteCount -= 1
if self.A_RWLock.V_WriteCount == 0:
self.A_RWLock.A_LockReadTry.release()
self.A_RWLock.A_LockWriteCount.release()
return False
self.V_Locked = True
return True
def release(self):
if not self.V_Locked: raise RuntimeError("cannot release un-acquired lock")
self.V_Locked = False
self.A_RWLock.A_Resource.release()
self.A_RWLock.A_LockWriteCount.acquire()
self.A_RWLock.V_WriteCount -= 1
if (self.A_RWLock.V_WriteCount == 0):
self.A_RWLock.A_LockReadTry.release()
self.A_RWLock.A_LockWriteCount.release()
def locked(self):
return self.V_Locked
def __enter__(self):
self.acquire()
def __exit__(self, p_Type, p_Value, p_Traceback):
self.release()
def genRlock(self):
"""
Generate a reader lock
"""
return RWLockWrite._aReader(self)
def genWlock(self):
"""
Generate a writer lock
"""
return RWLockWrite._aWriter(self)
class RWLockFair(object):
"""
A Read/Write lock giving fairness to both Reader and Writer
"""
def __init__(self):
self.V_ReadCount = 0
self.A_LockReadCount = threading.Lock()
self.A_LockRead = threading.Lock()
self.A_LockWrite = threading.Lock()
class _aReader(object):
def __init__(self, p_RWLock):
self.A_RWLock = p_RWLock
self.V_Locked = False
def acquire(self, blocking=1, timeout=-1):
p_TimeOut = None if (blocking and timeout < 0) else (timeout if blocking else 0)
c_DeadLine = None if p_TimeOut is None else (time.time() + p_TimeOut)
if not self.A_RWLock.A_LockRead.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
return False
if not self.A_RWLock.A_LockReadCount.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
self.A_RWLock.A_LockRead.release()
return False
self.A_RWLock.V_ReadCount += 1
if self.A_RWLock.V_ReadCount == 1:
if not self.A_RWLock.A_LockWrite.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
self.A_RWLock.V_ReadCount -= 1
self.A_RWLock.A_LockReadCount.release()
self.A_RWLock.A_LockRead.release()
return False
self.A_RWLock.A_LockReadCount.release()
self.A_RWLock.A_LockRead.release()
self.V_Locked = True
return True
def release(self):
if not self.V_Locked: raise RuntimeError("cannot release un-acquired lock")
self.V_Locked = False
self.A_RWLock.A_LockReadCount.acquire()
self.A_RWLock.V_ReadCount -= 1
if self.A_RWLock.V_ReadCount == 0:
self.A_RWLock.A_LockWrite.release()
self.A_RWLock.A_LockReadCount.release()
def locked(self):
return self.V_Locked
def __enter__(self):
self.acquire()
def __exit__(self, p_Type, p_Value, p_Traceback):
self.release()
class _aWriter(object):
def __init__(self, p_RWLock):
self.A_RWLock = p_RWLock
self.V_Locked = False
def acquire(self, blocking=1, timeout=-1):
p_TimeOut = None if (blocking and timeout < 0) else (timeout if blocking else 0)
c_DeadLine = None if p_TimeOut is None else (time.time() + p_TimeOut)
if not self.A_RWLock.A_LockRead.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
return False
if not self.A_RWLock.A_LockWrite.acquire(blocking=1, timeout=-1 if c_DeadLine is None else max(0, c_DeadLine - time.time())):
self.A_RWLock.A_LockRead.release()
return False
self.V_Locked = True
return True
def release(self):
if not self.V_Locked: raise RuntimeError("cannot release un-acquired lock")
self.V_Locked = False
self.A_RWLock.A_LockWrite.release()
self.A_RWLock.A_LockRead.release()
def locked(self):
return self.V_Locked
def __enter__(self):
self.acquire()
def __exit__(self, p_Type, p_Value, p_Traceback):
self.release()
def genRlock(self):
"""
Generate a reader lock
"""
return RWLockFair._aReader(self)
def genWlock(self):
"""
Generate a writer lock
"""
return RWLockFair._aWriter(self)
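# Illustrative sketch (not part of the original module; the helper name is hypothetical):
# acquire() also supports non-blocking and timed attempts and returns False on failure,
# for any of the three lock flavours.
def _example_nonblocking_acquire():
    rw_lock = RWLockFair()
    writer = rw_lock.genWlock()
    reader = rw_lock.genRlock()
    writer.acquire()
    got_read_lock = reader.acquire(blocking=0)  # expected: False while the writer holds it
    writer.release()
    return got_read_lock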
| 36.198502
| 133
| 0.720848
|
48bf7dec1a9ee833e16512885e02cea51f8dd891
| 172
|
py
|
Python
|
AtCoder/ABC/000-159/ABC135_B.py
|
sireline/PyCode
|
8578467710c3c1faa89499f5d732507f5d9a584c
|
[
"MIT"
] | null | null | null |
AtCoder/ABC/000-159/ABC135_B.py
|
sireline/PyCode
|
8578467710c3c1faa89499f5d732507f5d9a584c
|
[
"MIT"
] | null | null | null |
AtCoder/ABC/000-159/ABC135_B.py
|
sireline/PyCode
|
8578467710c3c1faa89499f5d732507f5d9a584c
|
[
"MIT"
] | null | null | null |
N = int(input())
P = [int(n) for n in input().split()]
X = [n for n in range(1, N+1)]
ans = sum([0 if p==x else 1 for p,x in zip(P, X)])
print('YES' if ans <= 2 else 'NO')
| 28.666667
| 50
| 0.546512
|
8bcdaa745ff3b0c1886791f4fe795e270a4e85c7
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/poetry/console/logging/io_formatter.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/poetry/console/logging/io_formatter.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/poetry/console/logging/io_formatter.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/6d/22/53/c55351916bc6e330317cf82fa911c4318c64258321ebbccd05d85f5c26
| 96
| 96
| 0.895833
|
dab4b043f209fe9a3a7aed1c8587d39d176641b7
| 1,887
|
py
|
Python
|
redbot/syntax/rfc7234.py
|
Malvoz/redbot
|
0edef8d4efefddde49d36cd97e471fc187837169
|
[
"MIT"
] | 167
|
2015-01-07T16:34:56.000Z
|
2022-02-20T15:20:06.000Z
|
redbot/syntax/rfc7234.py
|
Malvoz/redbot
|
0edef8d4efefddde49d36cd97e471fc187837169
|
[
"MIT"
] | 180
|
2015-02-01T01:37:53.000Z
|
2022-02-17T04:32:01.000Z
|
redbot/syntax/rfc7234.py
|
Malvoz/redbot
|
0edef8d4efefddde49d36cd97e471fc187837169
|
[
"MIT"
] | 32
|
2015-05-20T21:00:13.000Z
|
2022-02-16T10:14:15.000Z
|
# pylint: disable=line-too-long, unused-import
"""
Regex for RFC7234
These regexes are directly derived from the collected ABNF in RFC7234.
<http://httpwg.org/specs/rfc7234.html#collected.abnf>
They should be processed with re.VERBOSE.
"""
from .rfc3986 import port, host as uri_host
from .rfc5234 import DIGIT, DQUOTE, SP
from .rfc7230 import list_rule, OWS, field_name, pseudonym, quoted_string, token
from .rfc7231 import HTTP_date
SPEC_URL = "http://httpwg.org/specs/rfc7234"
# delta-seconds = 1*DIGIT
delta_seconds = r"{DIGIT}+".format(**locals())
# Age = delta-seconds
Age = delta_seconds
# cache-directive = token [ "=" ( token / quoted-string ) ]
cache_directive = r"(?: {token} (?: = (?: {token} | {quoted_string} ) )? )".format(
**locals()
)
# Cache-Control = 1#cache-directive
Cache_Control = list_rule(cache_directive, 1)
# Expires = HTTP-date
Expires = HTTP_date
# extension-pragma = token [ "=" ( token / quoted-string ) ]
extension_pragma = r"(?: {token} (?: = (?: {token} | {quoted_string} ) )? )".format(
**locals()
)
# pragma-directive = "no-cache" / extension-pragma
pragma_directive = r"(?: no-cache | {extension_pragma} )".format(**locals())
# Pragma = 1#pragma-directive
Pragma = list_rule(pragma_directive, 1)
# warn-agent = ( uri-host [ ":" port ] ) / pseudonym
warn_agent = r"(?: (?: {uri_host} (?: : {port} )? ) | {pseudonym} )".format(**locals())
# warn-code = 3DIGIT
warn_code = r"{DIGIT}{{3}}".format(**locals())
# warn-date = DQUOTE HTTP-date DQUOTE
warn_date = r"(?: {DQUOTE} {HTTP_date} {DQUOTE} )".format(**locals())
# warn-text = quoted-string
warn_text = quoted_string
# warning-value = warn-code SP warn-agent SP warn-text [ SP warn-date ]
warning_value = r"(?: {warn_code} {SP} {warn_agent} {SP} {warn_text} (?: {SP} {warn_date} )? )".format(
**locals()
)
# Warning = 1#warning-value
Warning = list_rule(warning_value, 1)
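# Illustrative usage sketch (not part of the original module; the helper name is
# hypothetical): the collected rules are plain strings intended to be compiled with
# re.VERBOSE, e.g. to validate a single Cache-Control directive.
def _example_compile_cache_directive():
    import re
    directive_re = re.compile(cache_directive, re.VERBOSE)
    return bool(directive_re.fullmatch("max-age=3600"))  # expected: True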
| 23.012195
| 103
| 0.665077
|
91d05572d93d3711fdaf1f014ff4e4c4739f0d19
| 2,962
|
py
|
Python
|
vega/networks/pytorch/losses/sum_loss.py
|
jie311/vega
|
1bba6100ead802697e691403b951e6652a99ccae
|
[
"MIT"
] | 724
|
2020-06-22T12:05:30.000Z
|
2022-03-31T07:10:54.000Z
|
vega/networks/pytorch/losses/sum_loss.py
|
jie311/vega
|
1bba6100ead802697e691403b951e6652a99ccae
|
[
"MIT"
] | 147
|
2020-06-30T13:34:46.000Z
|
2022-03-29T11:30:17.000Z
|
vega/networks/pytorch/losses/sum_loss.py
|
jie311/vega
|
1bba6100ead802697e691403b951e6652a99ccae
|
[
"MIT"
] | 160
|
2020-06-29T18:27:58.000Z
|
2022-03-23T08:42:21.000Z
|
# -*- coding: utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""Sum_loss for detection task."""
import torch
from torch import nn
from collections import OrderedDict
from vega.common import ClassType, ClassFactory
import os
import pickle
import logging
@ClassFactory.register(ClassType.LOSS)
class SumLoss(nn.Module):
"""Calculate sum of input losses."""
def __init__(self):
"""Init SumLoss."""
super(SumLoss, self).__init__()
def forward(self, input, target=None):
"""Calculate sum of input losses.
:param input: dict of losses.
:type input: dict
:param target: `target` Tensor, default None.
:type target: type torch.Tensor
:return: return sum of losses.
:rtype: torch.Tensor
"""
losses = input
if not isinstance(losses, dict) and not isinstance(losses, OrderedDict):
return None
log_vars = OrderedDict()
for loss_name, loss_value in losses.items():
if isinstance(loss_value, torch.Tensor):
log_vars[loss_name] = loss_value.mean()
elif isinstance(loss_value, list):
log_vars[loss_name] = sum(_loss.mean() for _loss in loss_value)
else:
raise TypeError(
'{} is not a tensor or list of tensors'.format(loss_name))
init_loss = [_value for _key, _value in log_vars.items() if 'loss' in _key]
if hasattr(self, "dynamic_loss_weight"):
# save the init loss
loss_save = [float(_value.detach().cpu().numpy()) for _value in init_loss]
save_file = os.path.join(self.save_path, "muti_loss.pkl")
with open(save_file, "wb") as f:
pickle.dump(loss_save, f)
if len(self.dynamic_loss_weight) != len(init_loss):
logging.error("The length of the losses must match the length of the weights, but got {} and {}"
.format(len(init_loss), len(self.dynamic_loss_weight)))
weighted_loss = [self.dynamic_loss_weight[i] * init_loss[i] for i in range(len(init_loss))]
sum_loss = sum(weighted_loss)
else:
sum_loss = sum(init_loss)
# Debug
"""
if loss > 100:
logging.error(str(losses))
import os
os._exit()
"""
return sum_loss
def adaptive_muti_loss(self, save_path, weight):
"""Set adaptive muti loss params."""
self.save_path = save_path
self.dynamic_loss_weight = weight
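# Illustrative usage sketch (not part of the original module; the helper name is
# hypothetical): SumLoss takes the dict of per-branch losses produced by a detector and
# returns their (optionally weighted) sum.
def _example_sum_loss():
    criterion = SumLoss()
    losses = {
        "loss_cls": torch.tensor(0.5),
        "loss_reg": [torch.tensor(0.2), torch.tensor(0.3)],
    }
    return criterion(losses)  # tensor(1.) = 0.5 + (0.2 + 0.3)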
| 35.686747
| 116
| 0.618501
|
eee0da47127801d1004a8e8f8752711f7492fb55
| 512
|
py
|
Python
|
quick_time/time.py
|
keegang6705/instant-time
|
56fbd61ccd01ddebfb545289adb1b7dc4e52ae6f
|
[
"MIT"
] | null | null | null |
quick_time/time.py
|
keegang6705/instant-time
|
56fbd61ccd01ddebfb545289adb1b7dc4e52ae6f
|
[
"MIT"
] | null | null | null |
quick_time/time.py
|
keegang6705/instant-time
|
56fbd61ccd01ddebfb545289adb1b7dc4e52ae6f
|
[
"MIT"
] | null | null | null |
import time
from datetime import datetime
import pytz
def timestamp(timezone: pytz.timezone, integer: bool = False):
tme = datetime.now(pytz.timezone(f"{timezone}"))
tmstamp = datetime.timestamp(tme)
if integer == True:
return int(tmstamp)
elif integer == False:
return tmstamp
else:
return tmstamp
def date(timezone: pytz.timezone):
tme = datetime.now(pytz.timezone(f"{timezone}"))
tmf = tme.strftime(f"%A %d %B(%m) %G | %H:%M:%S")
return tmf
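# Illustrative usage sketch (not part of the original module; the helper name and the
# example timezone are arbitrary).
def _example_usage():
    unix_seconds = timestamp("Asia/Bangkok", integer=True)  # e.g. 1700000000
    pretty = date("Asia/Bangkok")  # e.g. "Monday 13 November(11) 2023 | 12:34:56"
    return unix_seconds, pretty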
| 32
| 61
| 0.634766
|
32d50f1134f355d05c8ff793ff37bc03f44053b2
| 7,859
|
py
|
Python
|
netapp/santricity/models/v2/storage_pool_create_request.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 5
|
2016-08-23T17:52:22.000Z
|
2019-05-16T08:45:30.000Z
|
netapp/santricity/models/v2/storage_pool_create_request.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 2
|
2016-11-10T05:30:21.000Z
|
2019-04-05T15:03:37.000Z
|
netapp/santricity/models/v2/storage_pool_create_request.py
|
NetApp/santricity-webapi-pythonsdk
|
1d3df4a00561192f4cdcdd1890f4d27547ed2de2
|
[
"BSD-3-Clause-Clear"
] | 7
|
2016-08-25T16:11:44.000Z
|
2021-02-22T05:31:25.000Z
|
# coding: utf-8
"""
StoragePoolCreateRequest.py
The Clear BSD License
Copyright (c) – 2016, NetApp, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the limitations in the disclaimer below) provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of NetApp, Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from pprint import pformat
from six import iteritems
class StoragePoolCreateRequest(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
StoragePoolCreateRequest - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'raid_level': 'str', # (required parameter)
'disk_drive_ids': 'list[str]', # (required parameter)
'erase_secured_drives': 'bool',
'name': 'str'
}
self.attribute_map = {
'raid_level': 'raidLevel', # (required parameter)
'disk_drive_ids': 'diskDriveIds', # (required parameter)
'erase_secured_drives': 'eraseSecuredDrives',
'name': 'name'
}
self._raid_level = None
self._disk_drive_ids = None
self._erase_secured_drives = None
self._name = None
@property
def raid_level(self):
"""
Gets the raid_level of this StoragePoolCreateRequest.
The RAID configuration for the new storage pool.
:return: The raid_level of this StoragePoolCreateRequest.
:rtype: str
:required/optional: required
"""
return self._raid_level
@raid_level.setter
def raid_level(self, raid_level):
"""
Sets the raid_level of this StoragePoolCreateRequest.
The RAID configuration for the new storage pool.
:param raid_level: The raid_level of this StoragePoolCreateRequest.
:type: str
"""
allowed_values = ["raidUnsupported", "raidAll", "raid0", "raid1", "raid3", "raid5", "raid6", "raidDiskPool", "__UNDEFINED"]
if raid_level not in allowed_values:
raise ValueError(
"Invalid value for `raid_level`, must be one of {0}"
.format(allowed_values)
)
self._raid_level = raid_level
@property
def disk_drive_ids(self):
"""
Gets the disk_drive_ids of this StoragePoolCreateRequest.
The identifiers of the disk drives to use for creating the storage pool.
:return: The disk_drive_ids of this StoragePoolCreateRequest.
:rtype: list[str]
:required/optional: required
"""
return self._disk_drive_ids
@disk_drive_ids.setter
def disk_drive_ids(self, disk_drive_ids):
"""
Sets the disk_drive_ids of this StoragePoolCreateRequest.
The identifiers of the disk drives to use for creating the storage pool.
:param disk_drive_ids: The disk_drive_ids of this StoragePoolCreateRequest.
:type: list[str]
"""
self._disk_drive_ids = disk_drive_ids
@property
def erase_secured_drives(self):
"""
Gets the erase_secured_drives of this StoragePoolCreateRequest.
Security-enabled drives that were previously part of a secured storage pool must be erased before they can be re-used. Enable to automatically erase such drives.
:return: The erase_secured_drives of this StoragePoolCreateRequest.
:rtype: bool
:required/optional: optional
"""
return self._erase_secured_drives
@erase_secured_drives.setter
def erase_secured_drives(self, erase_secured_drives):
"""
Sets the erase_secured_drives of this StoragePoolCreateRequest.
Security-enabled drives that were previously part of a secured storage pool must be erased before they can be re-used. Enable to automatically erase such drives.
:param erase_secured_drives: The erase_secured_drives of this StoragePoolCreateRequest.
:type: bool
"""
self._erase_secured_drives = erase_secured_drives
@property
def name(self):
"""
Gets the name of this StoragePoolCreateRequest.
The user-label to assign to the new storage pool.
:return: The name of this StoragePoolCreateRequest.
:rtype: str
:required/optional: required
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this StoragePoolCreateRequest.
The user-label to assign to the new storage pool.
:param name: The name of this StoragePoolCreateRequest.
:type: str
"""
self._name = name
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
if self is None:
return None
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if self is None or other is None:
return None
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
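# Illustrative usage sketch (not part of the original module; the helper name and the
# drive identifiers are hypothetical).
def _example_create_request():
    request = StoragePoolCreateRequest()
    request.raid_level = "raid5"  # validated against the allowed RAID levels above
    request.disk_drive_ids = ["drive-1", "drive-2", "drive-3"]
    request.name = "pool-01"
    return request.to_dict()  # {'raid_level': 'raid5', 'disk_drive_ids': [...], ...}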
| 37.42381
| 844
| 0.640158
|